Lines Matching refs:z13

313 __ Index(z13.VnB(), 0, -1);
364 ASSERT_EQUAL_SVE_LANE(0x00, z13.VnB(), i);
405 ZRegister mla_d_result = z13.WithLaneSize(lane_size_in_bits);
787 __ Dup(z13.VnB(), -1);
865 __ Dup(z13.VnB(), 0xff);
866 __ Clasta(z13.VnS(), p1, z13.VnS(), z0.VnS());
907 ASSERT_EQUAL_SVE(z13_expected, z13.VnD());
1149 InsrHelper(&masm, z13.VnD(), z13_inputs);
1152 __ Cmphi(p7.VnD(), p1.Zeroing(), z12.VnD(), z13.VnD());
1184 __ Cmplo(p13.VnD(), p1.Zeroing(), z13.VnD(), z12.VnD()); // HI
1235 InsrHelper(&masm, z13.VnB(), src1_inputs_1);
1239 __ Cmpge(p2.VnB(), p0.Zeroing(), z13.VnB(), z19.VnD());
1241 __ Cmpgt(p3.VnB(), p0.Zeroing(), z13.VnB(), z19.VnD());
1247 InsrHelper(&masm, z13.VnH(), src1_inputs_2);
1251 __ Cmple(p4.VnH(), p0.Zeroing(), z13.VnH(), z19.VnD());
1253 __ Cmplt(p5.VnH(), p0.Zeroing(), z13.VnH(), z19.VnD());
1259 InsrHelper(&masm, z13.VnS(), src1_inputs_3);
1263 __ Cmpeq(p6.VnS(), p0.Zeroing(), z13.VnS(), z19.VnD());
1265 __ Cmpne(p7.VnS(), p0.Zeroing(), z13.VnS(), z19.VnD());
1271 InsrHelper(&masm, z13.VnB(), src1_inputs_4);
1275 __ Cmplo(p8.VnB(), p0.Zeroing(), z13.VnB(), z19.VnD());
1277 __ Cmpls(p9.VnB(), p0.Zeroing(), z13.VnB(), z19.VnD());
1283 InsrHelper(&masm, z13.VnS(), src1_inputs_5);
1287 __ Cmphi(p10.VnS(), p0.Zeroing(), z13.VnS(), z19.VnD());
1289 __ Cmphs(p11.VnS(), p0.Zeroing(), z13.VnS(), z19.VnD());
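
The Cmp* lines above exercise the SVE "wide elements" compare form: the second
source is always .D, and each 64-bit lane of z19 is compared against every
narrower lane of z13 that overlaps it. A rough scalar model of one 128-bit
granule, with made-up inputs rather than the test's src1_inputs arrays:

    #include <cstdint>
    #include <cstdio>

    int main() {
      int8_t zn[16] = {-4, -3, -2, -1, 0, 1, 2, 3,
                       -4, -3, -2, -1, 0, 1, 2, 3};  // stand-in for z13.b
      int64_t zm[2] = {0, -2};                       // stand-in for z19.d
      bool pd[16];
      for (int i = 0; i < 16; i++) {
        int64_t wide = zm[i / 8];   // eight b lanes share each d lane
        pd[i] = (zn[i] >= wide);    // Cmpge; the other conditions are analogous
      }
      for (int i = 0; i < 16; i++) printf("%d", pd[i] ? 1 : 0);
      printf("\n");
      return 0;
    }
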
1386 __ dupm(z13.VnD(), 0x7ffffff800000000);
1438 ASSERT_EQUAL_SVE(z13_expected, z13.VnD());
1809 __ Decp(z13.VnH(), p0, z3.VnH());
1872 ASSERT_EQUAL_SVE(z3_expected, z13.VnH());
1957 __ Sqdecp(z13.VnH(), p0, z3.VnH());
2020 ASSERT_EQUAL_SVE(z3_expected, z13.VnH());
2105 __ Uqdecp(z13.VnH(), p0, z3.VnH());
2176 ASSERT_EQUAL_SVE(z3_expected, z13.VnH());
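
Decp, Sqdecp and Uqdecp above all subtract the number of active p0 lanes from
each element taken from z3, differing only in how they treat overflow. A
minimal per-lane sketch, assuming `count` is the active-lane count of p0.VnH():

    #include <algorithm>
    #include <cstdint>

    int16_t decp(int16_t x, int count) {    // plain modulo-2^16 wrap
      return static_cast<int16_t>(x - count);
    }

    int16_t sqdecp(int16_t x, int count) {  // saturate as signed
      int32_t r = static_cast<int32_t>(x) - count;
      return static_cast<int16_t>(std::clamp<int32_t>(r, INT16_MIN, INT16_MAX));
    }

    uint16_t uqdecp(uint16_t x, int count) {  // saturate at zero
      return (x < static_cast<unsigned>(count))
                 ? 0
                 : static_cast<uint16_t>(x - count);
    }
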
2262 __ Index(z13.VnH(), w0, x1);
2322 ASSERT_EQUAL_SVE_LANE((42 - (3 * i)) & h_mask, z13.VnH(), i);
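
Index fills lane i with start + i * step, so the assertion above implies w0 and
x1 held 42 and -3. A scalar sketch of what the check expects (lane count shown
for a 128-bit vector; real hardware scales with VL):

    #include <cstdint>
    #include <cstdio>

    int main() {
      const int64_t start = 42, step = -3;  // the w0/x1 values the check implies
      for (int i = 0; i < 8; i++) {         // 8 h lanes per 128 bits
        // Lane i holds start + i * step, wrapped to the lane width
        // (the "& h_mask" in the assertion).
        printf("z13.h[%d] = 0x%04x\n", i,
               static_cast<uint16_t>(start + i * step));
      }
      return 0;
    }
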
2512 InsrHelper(&masm, z13.VnB(), z13_inputs);
2515 __ Cmpeq(p2.VnB(), p0.Zeroing(), z13.VnB(), -15);
2517 __ Cmpeq(p3.VnB(), p0.Zeroing(), z13.VnB(), -127);
2610 InsrHelper(&masm, z13.VnB(), src1_inputs);
2613 __ Cmphi(p2.VnB(), p0.Zeroing(), z13.VnB(), 0x0f);
2615 __ Cmphi(p3.VnB(), p0.Zeroing(), z13.VnB(), 0xf0);
2619 InsrHelper(&masm, z13.VnH(), src2_inputs);
2622 __ Cmphs(p4.VnH(), p0.Zeroing(), z13.VnH(), 0x1f);
2624 __ Cmphs(p5.VnH(), p0.Zeroing(), z13.VnH(), 0x1fff);
2628 InsrHelper(&masm, z13.VnS(), src3_inputs);
2631 __ Cmplo(p6.VnS(), p0.Zeroing(), z13.VnS(), 0x3f);
2633 __ Cmplo(p7.VnS(), p0.Zeroing(), z13.VnS(), 0x3f3f3f3f);
2637 InsrHelper(&masm, z13.VnD(), src4_inputs);
2640 __ Cmpls(p8.VnD(), p0.Zeroing(), z13.VnD(), 0x2f);
2642 __ Cmpls(p9.VnD(), p0.Zeroing(), z13.VnD(), 0x800000000000000);
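
Several of the immediates here exceed what the compare-immediate encodings can
hold: as I read the SVE encodings, the signed compares take a 5-bit signed
immediate (-16..15) and the unsigned compares a 7-bit unsigned one (0..127), so
values like 0x1fff or 0x3f3f3f3f force the MacroAssembler to materialize the
constant and fall back to a register-register compare. A sketch of the range
checks (helper names are mine, not VIXL's):

    #include <cstdint>

    bool IsSImm5(int64_t v) { return v >= -16 && v <= 15; }  // cmpeq/cmpne/cmpge/...
    bool IsUImm7(int64_t v) { return v >= 0 && v <= 127; }   // cmphi/cmphs/cmplo/cmpls

    // Encodable?  Emit cmp<cc> pd.t, pg/z, zn.t, #imm directly.
    // Otherwise:  dup the constant into a scratch z register and use the
    //             vector compare, which is what the macro must do above.
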
4416 ASSERT_EQUAL_SVE(z13, z29);
5627 __ Dup(z13.VnD(), z9.VnD(), index[3]);
5707 ASSERT_EQUAL_SVE_LANE(expected_z13, z13.VnD(), i);
5750 __ Sunpklo(z13.VnH(), z9.VnB());
5801 uint16_t expected = core.zreg_lane<uint16_t>(z13.GetCode(), i);
5867 ASSERT_EQUAL_SVE(z13, z22);
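
Sunpklo widens the low half of z9: each byte lane in the bottom half of the
vector is sign-extended into a halfword lane of z13 (Uunpklo and the *unpkhi
forms are the zero-extending and high-half variants). A one-line scalar model:

    #include <cstdint>

    // h_lanes = vector length in bits / 16.
    void sunpklo(const int8_t* zn_b, int16_t* zd_h, int h_lanes) {
      for (int i = 0; i < h_lanes; i++) {
        zd_h[i] = zn_b[i];  // low-half b lane, sign-extended to h
      }
    }
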
6656 __ Fcpy(z13.VnH(), pg.Merging(), kFP64NegativeInfinity);
6745 ASSERT_EQUAL_SVE(expected_z13, z13.VnD());
6750 ASSERT_EQUAL_SVE(z13.VnD(), z17.VnD());
6882 __ Ldr(z13, SVEMemOperand(x0, -3, SVE_MUL_VL));
6916 ASSERT_EQUAL_SVE(z3, z13);
7091 __ Ld1w(z13.VnS(), p6.Zeroing(), SVEMemOperand(x0, 42, SVE_MUL_VL));
7124 // Ld1w(z13.VnS(), ...)
7222 ASSERT_EQUAL_SVE(z23, z13);
7428 __ Index(z13.VnH(), 7, -2);
7431 __ St2h(z12.VnH(), z13.VnH(), p6, SVEMemOperand(x0, x2, LSL, 1));
7473 __ Mov(z7.VnH(), p6.Merging(), z13.VnH());
7521 // st2h { z12.h, z13.h }, SVE_VL16
7661 __ Dup(z13.VnD(), 0);
7664 __ Mov(z13.VnD(), p3.Merging(), z0.VnD());
7776 ASSERT_EQUAL_SVE(z13, z25);
7824 __ Index(z13.VnH(), 6, -2);
7829 __ St3h(z13.VnH(), z14.VnH(), z15.VnH(), p6, SVEMemOperand(x0, x2, LSL, 1));
7834 __ Mov(z7.VnH(), p6.Merging(), z13.VnH());
7867 __ Dup(z13.VnD(), 0);
7870 __ Mov(z13.VnD(), p4.Merging(), z31.VnD());
7931 // st3h { z13.h, z14.h, z15.h }, SVE_VL16
7989 ASSERT_EQUAL_SVE(z13, z25);
8028 __ Index(z13.VnB(), 4, -7);
8030 __ St4b(z10.VnB(), z11.VnB(), z12.VnB(), z13.VnB(), p0, SVEMemOperand(x0));
8039 __ Mov(z6.VnB(), p0.Merging(), z13.VnB());
8078 __ Dup(z13.VnS(), 0);
8082 __ Mov(z13.VnS(), p2.Merging(), z31.VnS());
8161 // st4b { z10.b, z11.b, z12.b, z13.b }, SVE_ALL
8235 ASSERT_EQUAL_SVE(z13, z29);
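
St4b stores the four registers interleaved lane-by-lane, which is why the
expected patterns above are rebuilt with predicated Movs. A scalar model of the
memory layout, assuming an all-true predicate:

    #include <cstdint>

    void st4b(const int8_t* z10, const int8_t* z11, const int8_t* z12,
              const int8_t* z13, int8_t* mem, int b_lanes) {
      for (int i = 0; i < b_lanes; i++) {
        mem[4 * i + 0] = z10[i];  // structure member 0
        mem[4 * i + 1] = z11[i];  // structure member 1
        mem[4 * i + 2] = z12[i];  // structure member 2
        mem[4 * i + 3] = z13[i];  // structure member 3
      }
    }
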
8342 __ Dup(z13.VnS(), 0);
8346 __ Mov(z13.VnS(), p5.Merging(), z31.VnS());
8496 ASSERT_EQUAL_SVE(z13, z29);
8553 __ Dup(z13.VnB(), 0);
8555 __ Mov(z13.VnB(), p0.Merging(), z1.VnB());
8599 ASSERT_EQUAL_SVE(z1, z13);
8649 __ Dup(z13.VnB(), 0);
8651 __ Mov(z13.VnB(), p0.Merging(), z1.VnB());
9504 __ Ldff1sh(z13.VnD(), all.Zeroing(), SVEMemOperand(x0, z30.VnD(), SXTW));
9593 ASSERT_EQUAL_SVE(expected_z13, z13.VnD());
9698 __ Ld1sh(z13.VnD(), all.Zeroing(), SVEMemOperand(x0, z30.VnD(), SXTW));
9773 ASSERT_EQUAL_SVE(expected_z13, z13.VnD());
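
Both loads gather halfwords: each 64-bit lane of z30 supplies an offset whose
low 32 bits are sign-extended (SXTW) and added to x0, and the loaded halfword
is sign-extended into the d lane. Ldff1sh is the first-faulting variant, which
records progress in FFR rather than faulting on later lanes. A sketch of the
non-faulting address arithmetic:

    #include <cstdint>
    #include <cstring>

    void ld1sh_gather(const uint8_t* x0, const uint64_t* z30, int64_t* zt,
                      int d_lanes) {
      for (int i = 0; i < d_lanes; i++) {
        int64_t off = static_cast<int32_t>(z30[i]);  // SXTW of the low word
        int16_t h;
        std::memcpy(&h, x0 + off, sizeof(h));
        zt[i] = h;  // sign-extend the halfword into the 64-bit lane
      }
    }
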
10376 __ Ldnt1w(z13.VnS(), p3.Zeroing(), SVEMemOperand(x0, 7, SVE_MUL_VL));
10390 ASSERT_EQUAL_SVE(z12, z13);
10660 ZRegister zn_ld_d = z13.WithLaneSize(esize_in_bits);
11280 __ Fdup(z13.VnS(), kFP32NegativeInfinity);
11302 ASSERT_EQUAL_SVE(FloatToRawbits(kFP32NegativeInfinity), z13.VnS());
12061 __ Dup(z13.VnD(), 0);
12068 __ Sdot(z13.VnD(), z13.VnD(), z1.VnH(), z10.VnH(), 1);
12069 __ Mul(z13.VnD(), z13.VnD(), multiplier);
12086 ASSERT_EQUAL_SVE(z12.VnD(), z13.VnD());
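
The indexed Sdot form above accumulates, into each d lane of z13, a four-way
dot product of z1's h lanes against one group of four h lanes selected (per
128-bit segment) from z10 by the index. A scalar model, assuming the d-sized
form allows index 0 or 1:

    #include <cstdint>

    void sdot_indexed(int64_t* zda, const int16_t* zn, const int16_t* zm,
                      int d_lanes, int index) {
      for (int d = 0; d < d_lanes; d++) {
        int seg = d / 2;  // two d lanes (eight h lanes) per 128-bit segment
        int64_t acc = 0;
        for (int j = 0; j < 4; j++) {
          acc += int64_t{zn[4 * d + j]} * int64_t{zm[8 * seg + 4 * index + j]};
        }
        zda[d] += acc;  // accumulate into the existing z13 lane
      }
    }
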
13012 __ Asr(z13.VnD(), p0.Merging(), z31.VnD(), z1.VnD());
13047 ASSERT_EQUAL_SVE(expected_z13, z13.VnD());
13203 __ Mov(z13, z31);
13204 __ Asrd(z13.VnD(), p5.Merging(), z13.VnD(), 2);
13238 ASSERT_EQUAL_SVE(expected_z13, z13.VnD());
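
Asrd is the divide-flavoured shift: unlike Asr it rounds toward zero, so the
`#2` above is a signed division by four. Per lane:

    #include <cstdint>

    int64_t asrd(int64_t x, unsigned shift) {
      // Bias negative inputs so the truncating shift rounds toward zero.
      if (x < 0) x += (int64_t{1} << shift) - 1;
      return x >> shift;  // shift == 2 above, i.e. x / 4
    }
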
14462 __ Uzp2(z13.VnH(), z4.VnH(), z5.VnH());
14493 ASSERT_EQUAL_SVE(z1.VnD(), z13.VnD());
14589 __ Mov(z13, z0);
14590 __ Fcadd(z13.VnD(), p2.Merging(), z13.VnD(), z1.VnD(), 90);
14591 __ Fcadd(z13.VnD(), p3.Merging(), z13.VnD(), z1.VnD(), 270);
14606 ASSERT_EQUAL_SVE(z14.VnD(), z13.VnD());
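
Fcadd treats adjacent lanes as (real, imaginary) pairs and adds z1 rotated by
90 or 270 degrees, so the two predicated calls above build both rotations into
z13. A per-pair sketch of the rotation as I understand it:

    // rot 90 adds i*b; rot 270 adds -i*b (b being the zm pair).
    void fcadd(double* zd, const double* zn, const double* zm, int pairs,
               int rot) {
      for (int i = 0; i < pairs; i++) {
        double re = zn[2 * i], im = zn[2 * i + 1];
        if (rot == 90) {
          zd[2 * i] = re - zm[2 * i + 1];
          zd[2 * i + 1] = im + zm[2 * i];
        } else {  // rot == 270
          zd[2 * i] = re + zm[2 * i + 1];
          zd[2 * i + 1] = im - zm[2 * i];
        }
      }
    }
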
14841 __ Dup(z13.VnH(), z25.VnH(), 1);
14842 FPSegmentPatternHelper(&masm, z13.VnH(), p0.Merging(), z13.VnH());
14843 __ Fmul(z13.VnH(), z1.VnH(), z13.VnH());
14876 ASSERT_EQUAL_SVE(z13.VnH(), z3.VnH());
14933 __ Mov(z13, z4);
14934 __ Ftmad(z13.VnD(), z13.VnD(), z5.VnD(), 5);
14973 ASSERT_EQUAL_SVE(expected_z13, z13.VnD());
15179 __ Mov(z13, z12);
15180 __ Fsub(z13.VnS(), p0m, z13.VnS(), 1.0);
15181 __ Mov(z14, z13);
15244 ASSERT_EQUAL_SVE(expected_z13, z13.VnD());
16688 ZRegister d_result = z13.WithLaneSize(lane_size_in_bits);
16985 __ Mov(z13, z2);
16986 (masm.*macro_idx)(z13.VnD(), z13.VnD(), z1.VnD(), z0.VnD(), 0); // zd == za
17050 ASSERT_EQUAL_SVE(z11.VnD(), z13.VnD());
17794 ZRegister zt_fp_3 = z13.WithLaneSize(lane_size_in_bits);
18445 __ Adr(z13.VnD(), SVEMemOperand(z0.VnD(), z2.VnD(), SXTW, 2));
18483 ASSERT_EQUAL_SVE(expected_z13, z13.VnD());
18738 __ Index(z13.VnH(), 0, -1);
18739 __ Saba(z13.VnH(), z13.VnH(), z12.VnH(), z13.VnH());
18760 ASSERT_EQUAL_SVE(z12, z13);
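
Saba accumulates the absolute difference of its two source operands; the call
above aliases the accumulator and one source to z13, computing
z13 + |z12 - z13| per lane. A scalar model:

    #include <cstdint>

    int16_t saba(int16_t zda, int16_t zn, int16_t zm) {
      int32_t diff = static_cast<int32_t>(zn) - zm;  // widen before |.|
      if (diff < 0) diff = -diff;
      return static_cast<int16_t>(zda + diff);       // wraps modulo 2^16
    }
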
18972 __ Ld1sw(z13.VnD(), p3.Zeroing(), SVEMemOperand(x1, z30.VnD()));
18984 ASSERT_EQUAL_SVE(z12, z13);
19223 __ Mov(z13, z31);
19224 __ Sqrdcmlah(z13.VnS(), z13.VnS(), z0.VnS(), z1.VnS(), 270);
19245 ASSERT_EQUAL_SVE(zd_270_expected, z13.VnS());
19250 ASSERT_EQUAL_SVE(z17, z13);
19705 __ Usdot(z13.VnS(), z4.VnS(), z10.VnB(), z11.VnB());
19725 ASSERT_EQUAL_SVE(z13_expected, z13.VnD());
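
Usdot is the mixed-sign dot product: each 32-bit lane of z13 accumulates four
unsigned bytes of z10 multiplied by the corresponding signed bytes of z11, on
top of the z4 accumulator. A scalar model:

    #include <cstdint>

    void usdot(int32_t* zda, const uint8_t* zn, const int8_t* zm, int s_lanes) {
      for (int s = 0; s < s_lanes; s++) {
        int32_t acc = 0;
        for (int j = 0; j < 4; j++) {
          acc += static_cast<int32_t>(zn[4 * s + j]) * zm[4 * s + j];
        }
        zda[s] += acc;
      }
    }
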