Lines Matching refs:z5

853 __ Clasta(z5.VnB(), p3, z5.VnB(), z0.VnB());
902 ASSERT_EQUAL_SVE(z5_expected, z5.VnD());
949 __ Compact(z5.VnD(), p1, z3.VnD());
966 ASSERT_EQUAL_SVE(z5_expected, z5.VnD());
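
COMPACT at 949 packs the active lanes of z3 downward and zeroes the rest, which is what the expected values at 966 encode. A minimal reference-model sketch; the helper name CompactD is assumed for illustration:

    #include <cstddef>
    #include <cstdint>
    #include <vector>

    // Active lanes of zn move to the lowest lanes of the result, in order;
    // the remaining lanes are zeroed. COMPACT exists for S and D lanes only.
    std::vector<uint64_t> CompactD(const std::vector<bool>& pg,
                                   const std::vector<uint64_t>& zn) {
      std::vector<uint64_t> zd(zn.size(), 0);
      size_t next = 0;
      for (size_t i = 0; i < zn.size(); i++) {
        if (pg[i]) zd[next++] = zn[i];
      }
      return zd;
    }
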
1000 __ Index(z5.VnB(), -1, -1);
1001 __ Splice(z5.VnB(), p5, z5.VnB(), z30.VnB());
1058 ASSERT_EQUAL_SVE(z5_expected, z5.VnD());
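
SPLICE at 1001 joins the active segment of the first source to leading lanes of the second. A hedged reference-model sketch; SpliceB is an illustrative name, and with no active lanes the result is taken entirely from zm:

    #include <cstddef>
    #include <cstdint>
    #include <vector>

    // Lanes of zdn from the first active to the last active lane move to
    // the low end of the result; the remainder is filled from the lowest
    // lanes of zm.
    std::vector<uint8_t> SpliceB(const std::vector<bool>& pg,
                                 const std::vector<uint8_t>& zdn,
                                 const std::vector<uint8_t>& zm) {
      std::vector<uint8_t> zd;
      int first = -1, last = -1;
      for (size_t i = 0; i < pg.size(); i++) {
        if (pg[i]) {
          if (first < 0) first = static_cast<int>(i);
          last = static_cast<int>(i);
        }
      }
      if (first >= 0) {
        for (int i = first; i <= last; i++) zd.push_back(zdn[i]);
      }
      for (size_t i = 0; zd.size() < zdn.size(); i++) zd.push_back(zm[i]);
      return zd;
    }
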
1362 InsrHelper(&masm, z5.VnD(), z21_inputs);
1367 __ Eor(z5.VnD(), z5.VnD(), 0x0000ffff0000ffff);
1417 ASSERT_EQUAL_SVE(z5_expected, z5.VnD());
1460 __ Dup(z5.VnS(), -0x8000);
1490 ASSERT_EQUAL_SVE(0xffff8000, z5.VnS());
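
The 0xffff8000 expectation at 1490 is plain two's-complement truncation of the DUP immediate. A self-contained check:

    #include <cassert>
    #include <cstdint>

    int main() {
      // -0x8000 replicated into a 32-bit lane reads back as 0xffff8000.
      int32_t imm = -0x8000;
      assert(static_cast<uint32_t>(imm) == 0xffff8000u);
      return 0;
    }
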
1818 __ Mov(z5, z1);
1828 __ Incp(z5.VnD(), p0);
1858 ASSERT_EQUAL_SVE(z5_expected, z5.VnD());
1903 __ Dup(z5.VnH(), 0);
1904 __ Incp(z5.VnH(), p15);
1926 ASSERT_EQUAL_SVE_LANE(h_lane_count, z5.VnH(), i);
1966 __ Mov(z5, z1);
1976 __ Sqincp(z5.VnD(), p0);
2006 ASSERT_EQUAL_SVE(z5_expected, z5.VnD());
2051 __ Dup(z5.VnH(), 0);
2052 __ Sqincp(z5.VnH(), p15);
2074 ASSERT_EQUAL_SVE_LANE(h_lane_count, z5.VnH(), i);
2114 __ Mov(z5, z1);
2124 __ Uqincp(z5.VnD(), p0);
2161 ASSERT_EQUAL_SVE(z5_expected, z5.VnD());
2211 __ Dup(z5.VnH(), x2);
2212 __ Uqincp(z5.VnH(), p15);
2234 ASSERT_EQUAL_SVE_LANE(0x1200 + h_lane_count, z5.VnH(), i);
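
The INCP/SQINCP/UQINCP uses above all add the active-lane count of a predicate to every destination lane, differing only in saturation. A reference-model sketch for 16-bit lanes (IncpH and Sat are illustrative names):

    #include <cstdint>
    #include <vector>

    enum class Sat { kNone, kSigned, kUnsigned };

    // Each lane grows by the number of active predicate lanes; SQINCP
    // saturates the sum as signed, UQINCP as unsigned, and INCP wraps.
    void IncpH(std::vector<uint16_t>* zdn, const std::vector<bool>& pg,
               Sat sat) {
      int64_t count = 0;
      for (bool active : pg) count += active ? 1 : 0;
      for (uint16_t& lane : *zdn) {
        if (sat == Sat::kSigned) {
          int64_t sum = static_cast<int16_t>(lane) + count;
          if (sum > INT16_MAX) sum = INT16_MAX;
          if (sum < INT16_MIN) sum = INT16_MIN;
          lane = static_cast<uint16_t>(sum);
        } else if (sat == Sat::kUnsigned) {
          int64_t sum = lane + count;
          if (sum > UINT16_MAX) sum = UINT16_MAX;
          lane = static_cast<uint16_t>(sum);
        } else {
          lane = static_cast<uint16_t>(lane + count);
        }
      }
    }

With p15 true in every lane, count equals the lane count, matching the h_lane_count expectations at 1926, 2074 and 2234.
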
2251 __ Index(z5.VnH(), -1, 42);
2305 ASSERT_EQUAL_SVE_LANE((-1 + (42 * i)) & h_mask, z5.VnH(), i);
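
The asserted expression at 2305 is just the INDEX definition with an explicit mask. A one-line sketch (IndexLaneH is an illustrative name):

    #include <cstdint>

    // Lane i of Index(zd, start, step) holds start + i * step, truncated
    // to the lane width; masking with h_mask spells out the truncation.
    uint16_t IndexLaneH(int64_t start, int64_t step, int i) {
      return static_cast<uint16_t>(start + step * i);
    }
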
4408 ASSERT_EQUAL_SVE(z5, z21);
5617 __ Rev(z5.VnB(), z9.VnB());
5654 core.zreg_lane(z5.GetCode(), kBRegSize, lane_count - i - 1);
5907 __ Mov(z5, z31);
5908 __ Not(z5.VnH(), pg, z5.VnH()); // destructive
5958 ASSERT_EQUAL_SVE(expected_z5, z5.VnD());
6015 __ Mov(z5, z31);
6016 __ Fneg(z5.VnD(), pg, z5.VnD()); // destructive
6062 ASSERT_EQUAL_SVE(expected_z5, z5.VnD());
6184 __ Mov(z5, z29);
6185 __ Sxtw(z5.VnD(), pg, z31.VnD());
6230 ASSERT_EQUAL_SVE(expected_z5, z5.VnD());
6270 __ Mov(z5, z31);
6271 __ Uxtw(z5.VnD(), pg, z5.VnD()); // destructive
6313 ASSERT_EQUAL_SVE(expected_z5, z5.VnD());
6358 __ Mov(z5, z29);
6359 __ Neg(z5.VnD(), z31.VnD());
6402 ASSERT_EQUAL_SVE(expected_z5, z5.VnD());
6447 __ Cpy(z5.VnH(), pg, h29);
6476 ASSERT_EQUAL_SVE(expected_h, z5.VnD());
6540 __ Cpy(z5.VnS(), pg.Zeroing(), -128);
6586 ASSERT_EQUAL_SVE(expected_z5, z5.VnD());
6646 __ Fcpy(z5.VnS(), pg.Merging(), 5.0f);
6697 ASSERT_EQUAL_SVE(expected_z5, z5.VnD());
6860 __ Index(z5.VnB(), 5, 13);
6871 __ Str(z5, SVEMemOperand(x0, -256, SVE_MUL_VL));
6905 middle[(-256 * vl) + i] = (5 + (13 * i)) & 0xff; // z5
6918 ASSERT_EQUAL_SVE(z5, z15);
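
The immediate in SVE_MUL_VL addressing counts whole vector registers, which is why the expected-memory expression at 6905 scales by vl. A sketch under that reading (MulVlOffset is an illustrative name; vl_in_bytes is the vector length in bytes):

    #include <cstdint>

    // Byte offset of SVEMemOperand(x0, -256, SVE_MUL_VL): -256 vectors.
    int64_t MulVlOffset(int64_t imm, int64_t vl_in_bytes) {
      return imm * vl_in_bytes;
    }
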
7068 __ Index(z5.VnD(), 6, -2);
7072 __ St1d(z5.VnD(), p5, SVEMemOperand(x3, x4, LSL, 3));
7122 __ Mov(z22.VnD(), p5.Merging(), z5.VnD());
7189 // st1d { z5.d }, SVE_VL16
7289 __ Dup(z5.VnB(), 0);
7291 __ Mov(z5.VnB(), p0.Merging(), z15.VnB());
7387 ASSERT_EQUAL_SVE(z5, z21);
7465 __ Dup(z5.VnB(), 0);
7467 __ Mov(z5.VnB(), p7.Merging(), z11.VnB());
7559 ASSERT_EQUAL_SVE(z5, z21);
7611 __ Dup(z5.VnB(), 0);
7614 __ Mov(z5.VnB(), p0.Merging(), z11.VnB());
7762 ASSERT_EQUAL_SVE(z5, z17);
7818 __ Dup(z5.VnB(), 0);
7821 __ Mov(z5.VnB(), p7.Merging(), z11.VnB());
7975 ASSERT_EQUAL_SVE(z5, z17);
8034 __ Dup(z5.VnB(), 0);
8038 __ Mov(z5.VnB(), p0.Merging(), z12.VnB());
8223 ASSERT_EQUAL_SVE(z5, z21);
8290 __ Dup(z5.VnB(), 0);
8294 __ Mov(z5.VnB(), p7.Merging(), z21.VnB());
8484 ASSERT_EQUAL_SVE(z5, z21);
8528 __ Index(z5.VnH(), 43, 3);
8533 __ St3h(z4.VnH(), z5.VnH(), z6.VnH(), p1, SVEMemOperand(sp, x1, LSL, 1));
8562 __ Mov(z17.VnH(), p1.Merging(), z5.VnH());
8565 z5.VnH(),
8603 ASSERT_EQUAL_SVE(z5, z17);
8633 __ Index(z5.VnH(), 43, 3);
8636 __ St3h(z4.VnH(), z5.VnH(), z6.VnH(), p1, SVEMemOperand(sp, 6, SVE_MUL_VL));
8658 __ Mov(z17.VnH(), p1.Merging(), z5.VnH());
8661 z5.VnH(),
9411 __ Ldnf1b(z5.VnS(), p0.Zeroing(), SVEMemOperand(x0));
9414 __ Sel(z5.VnS(), p1, z5.VnS(), z10.VnS());
9434 ASSERT_EQUAL_SVE(z25, z5);
9482 __ Ldff1sh(z5.VnS(), all.Zeroing(), SVEMemOperand(x0, z31.VnS(), UXTW));
9570 ASSERT_EQUAL_SVE(expected_z5, z5.VnS());
9676 __ Ld1sh(z5.VnS(), all.Zeroing(), SVEMemOperand(x0, z31.VnS(), UXTW));
9750 ASSERT_EQUAL_SVE(expected_z5, z5.VnS());
10363 __ Ldnt1w(z5.VnS(), p3.Zeroing(), SVEMemOperand(x0, x1, LSL, 2));
10386 ASSERT_EQUAL_SVE(z4, z5);
10426 __ Sel(z5.VnH(), p2, z1.VnH(), z0.VnH());
10446 ASSERT_EQUAL_SVE(z4, z5);
10496 __ Mov(z5, z1);
10497 __ Ext(z5.VnB(), z5.VnB(), z5.VnB(), 16);
10498 __ Eor(z5.VnB(), z5.VnB(), z1.VnB());
10499 __ Orv(b5, p0, z5.VnB());
10500 __ Orr(z4, z4, z5);
10501 __ Mov(z5, z2);
10502 __ Ext(z5.VnB(), z5.VnB(), z5.VnB(), 16);
10503 __ Eor(z5.VnB(), z5.VnB(), z2.VnB());
10504 __ Orv(b5, p0, z5.VnB());
10505 __ Orr(z4, z4, z5);
10506 __ Mov(z5, z3);
10507 __ Ext(z5.VnB(), z5.VnB(), z5.VnB(), 16);
10508 __ Eor(z5.VnB(), z5.VnB(), z3.VnB());
10509 __ Orv(b5, p0, z5.VnB());
10510 __ Orr(z4, z4, z5);
10514 __ Ld1rqb(z5.VnB(), p1.Zeroing(), SVEMemOperand(x1, -48));
10535 ASSERT_EQUAL_SVE(z0, z5);
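
The three Mov/Ext/Eor/Orv/Orr stanzas at 10496-10510 share one idiom: rotating a vector by 16 bytes with EXT and XOR-ing against the original gives all-zero lanes exactly when every 16-byte segment is identical, which is what the replicating Ld1rqb load must produce; Orv then Orr funnel any mismatch into z4. A scalar sketch of the check, assuming vec points at the vector's vl bytes:

    #include <cstddef>
    #include <cstdint>

    // True when rotating by 16 bytes leaves the vector unchanged, i.e.
    // when all 16-byte segments hold the same quadword.
    bool SegmentsAllEqual(const uint8_t* vec, size_t vl) {
      uint8_t acc = 0;
      for (size_t i = 0; i < vl; i++) {
        acc |= static_cast<uint8_t>(vec[i] ^ vec[(i + 16) % vl]);
      }
      return acc == 0;
    }
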
10575 __ Index(z5.VnB(), 15, -1);
10631 ASSERT_EQUAL_SVE(z5, z4);
11213 __ Index(z5.VnS(), 0x7878, 1);
11214 __ Sub(z5.VnS(), 0x8000, z5.VnS());
11248 ASSERT_EQUAL_SVE(expected_z5, z5.VnS());
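
Sub with an immediate left operand is the reversed form, zd[i] = imm - zn[i]. A worked check against the Index values from 11213:

    #include <cassert>
    #include <cstdint>

    int main() {
      // Lane i starts as 0x7878 + i, so the reversed subtract leaves
      // 0x8000 - (0x7878 + i) = 0x0788 - i in lane i.
      for (uint32_t i = 0; i < 4; i++) {
        assert(0x8000u - (0x7878u + i) == 0x0788u - i);
      }
      return 0;
    }
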
11271 __ Fdup(z5.VnD(), 0.5f);
11294 ASSERT_EQUAL_SVE(DoubleToRawbits(0.5), z5.VnD());
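
The expected pattern at 11294 is the raw encoding of double 0.5. A self-contained check of that constant:

    #include <cassert>
    #include <cstdint>
    #include <cstring>

    int main() {
      double d = 0.5;  // biased exponent 1022, zero mantissa
      uint64_t bits;
      std::memcpy(&bits, &d, sizeof(bits));
      assert(bits == 0x3fe0000000000000u);
      return 0;
    }
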
11331 __ Mov(z5, z31);
11332 __ Eorv(h5, p0, z5.VnH()); // destructive
11384 ASSERT_EQUAL_SVE_LANE(0, z5.VnD(), i);
11418 __ Mov(z5, z31);
11419 __ Uaddv(h5, p0, z5.VnH()); // destructive
11457 ASSERT_EQUAL_SVE_LANE(0, z5.VnD(), i);
11488 __ Mov(z5, z31);
11489 __ Uminv(h5, p0, z5.VnH()); // destructive
11530 ASSERT_EQUAL_SVE_LANE(0, z5.VnD(), i);
11560 __ Mov(z5, z31);
11561 __ Umaxv(h5, p0, z5.VnH()); // destructive
11602 ASSERT_EQUAL_SVE_LANE(0, z5.VnD(), i);
11657 ZRegister dnm_result = z5.WithLaneSize(lane_size_in_bits);
11962 __ Dup(z5.VnS(), 0);
11973 __ Dup(z5.VnS(), 0);
11983 __ Udot(z5.VnS(), z5.VnS(), z1.VnB(), z0.VnB(), 3);
11984 __ Mul(z5.VnS(), z5.VnS(), 8);
12018 ASSERT_EQUAL_SVE(z2.VnS(), z5.VnS());
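
The indexed UDOT at 11983 accumulates four-byte dot products, selecting the multiplier group by index within each 128-bit segment of zm. A hedged reference-model sketch of the unpredicated S form (UdotIdxS is an illustrative name):

    #include <cstddef>
    #include <cstdint>
    #include <vector>

    // Each 32-bit lane gains the dot product of its four bytes of zn with
    // the byte group chosen by `index` in the matching segment of zm.
    void UdotIdxS(std::vector<uint32_t>* zda,
                  const std::vector<uint8_t>& zn,
                  const std::vector<uint8_t>& zm,
                  int index) {
      for (size_t lane = 0; lane < zda->size(); lane++) {
        size_t segment = lane / 4;  // four S lanes per 128-bit segment
        uint32_t sum = 0;
        for (size_t j = 0; j < 4; j++) {
          sum += zn[lane * 4 + j] * zm[(segment * 4 + index) * 4 + j];
        }
        (*zda)[lane] += sum;
      }
    }
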
12999 __ Lsr(z5.VnH(), p0.Merging(), z0.VnH(), z1.VnH());
13033 ASSERT_EQUAL_SVE(expected_z5, z5.VnD());
13073 __ Lsr(z5.VnH(), p0.Merging(), z0.VnH(), z1.VnD());
13093 ASSERT_EQUAL_SVE(expected_z5, z5.VnD());
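
The Lsr at 13073 is the wide-element form: each 16-bit lane is shifted by the 64-bit lane of z1 that covers it. A sketch of the lane rule, with merging predication omitted for brevity:

    #include <cstddef>
    #include <cstdint>
    #include <vector>

    // Four H lanes share one D shift amount; shifts of 16 or more clear
    // the lane, as for any SVE logical shift right.
    void LsrWideH(std::vector<uint16_t>* zd,
                  const std::vector<uint16_t>& zn,
                  const std::vector<uint64_t>& shifts) {
      for (size_t i = 0; i < zn.size(); i++) {
        uint64_t shift = shifts[i / 4];
        (*zd)[i] = (shift < 16) ? static_cast<uint16_t>(zn[i] >> shift) : 0;
      }
    }
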
13127 __ Asr(z5.VnH(), p0.Merging(), z4.VnH(), 3);
13153 ASSERT_EQUAL_SVE(expected_z5, z5.VnD());
13189 __ Mov(z5, z31);
13190 __ Asrd(z5.VnH(), p3.Merging(), z5.VnH(), 2);
13222 ASSERT_EQUAL_SVE(expected_z5, z5.VnD());
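
ASRD at 13190 is a rounding arithmetic shift: it divides a signed lane by a power of two, rounding toward zero. A reference-model sketch (AsrdH is an illustrative name):

    #include <cstdint>

    // Add (1 << shift) - 1 before shifting negative values so that, e.g.,
    // AsrdH(-1, 2) == 0 and AsrdH(-5, 2) == -1, i.e. truncating division.
    int16_t AsrdH(int16_t x, int shift) {
      int32_t bias = (x < 0) ? ((1 << shift) - 1) : 0;
      return static_cast<int16_t>((x + bias) >> shift);
    }
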
13605 __ Dup(z5.VnB(), 0x42);
13606 __ Rbit(z5.VnB(), p2.Merging(), z0.VnB());
13626 ASSERT_EQUAL_SVE(expected_z5, z5.VnD());
13647 __ Revh(z5.VnD(), p1.Merging(), z0.VnD());
13669 ASSERT_EQUAL_SVE(expected_z5, z5.VnD());
13721 InsrHelper(&masm, z5.VnD(), in5);
13728 __ Fexpa(z11.VnH(), z5.VnH());
14412 __ Trn2(z5.VnH(), z0.VnH(), z1.VnH());
14429 ASSERT_EQUAL_SVE(expected_z5, z5.VnD());
14453 __ Zip2(z5.VnH(), z0.VnH(), z1.VnH());
14461 __ Uzp1(z12.VnH(), z4.VnH(), z5.VnH());
14462 __ Uzp2(z13.VnH(), z4.VnH(), z5.VnH());
14479 ASSERT_EQUAL_SVE(expected_z5, z5.VnD());
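
The Zip2/Uzp1/Uzp2 trio above round-trips interleaved data. A sketch of the UZP lane rule (UzpH is an illustrative name; odd=false gives UZP1, odd=true gives UZP2):

    #include <cstddef>
    #include <cstdint>
    #include <vector>

    // Gather the even- (UZP1) or odd-numbered (UZP2) lanes of zn, then of
    // zm, into consecutive lanes of the result.
    std::vector<uint16_t> UzpH(const std::vector<uint16_t>& zn,
                               const std::vector<uint16_t>& zm, bool odd) {
      std::vector<uint16_t> zd;
      for (size_t i = odd ? 1 : 0; i < zn.size(); i += 2) zd.push_back(zn[i]);
      for (size_t i = odd ? 1 : 0; i < zm.size(); i += 2) zd.push_back(zm[i]);
      return zd;
    }
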
14535 __ Mov(z5, z0);
14536 __ Fcadd(z5.VnH(), p2.Merging(), z5.VnH(), z1.VnH(), 90);
14537 __ Fcadd(z5.VnH(), p3.Merging(), z5.VnH(), z1.VnH(), 270);
14600 ASSERT_EQUAL_SVE(z6.VnH(), z5.VnH());
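
FCADD treats lane pairs as complex numbers and adds the second operand rotated by 90 or 270 degrees. A reference-model sketch, assuming even lanes hold real parts and odd lanes imaginary parts (FcaddRef is an illustrative name):

    #include <cstddef>
    #include <vector>

    // rot 90 adds i*b to a; rot 270 adds -i*b. Predication omitted.
    void FcaddRef(std::vector<float>* a, const std::vector<float>& b,
                  int rot) {
      for (std::size_t i = 0; i < a->size(); i += 2) {
        if (rot == 90) {
          (*a)[i] -= b[i + 1];  // re -= b.im
          (*a)[i + 1] += b[i];  // im += b.re
        } else {                // rot == 270
          (*a)[i] += b[i + 1];  // re += b.im
          (*a)[i + 1] -= b[i];  // im -= b.re
        }
      }
    }
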
14644 __ Dup(z5.VnH(), 0);
14645 __ Fcmla(z5.VnH(), z0.VnH(), z3.VnH(), 1, 180);
14646 __ Fcmla(z5.VnH(), z0.VnH(), z3.VnH(), 1, 270);
14647 __ Fneg(z5.VnH(), p0.Merging(), z5.VnH());
14683 ASSERT_EQUAL_SVE(z6.VnH(), z5.VnH());
14719 __ Dup(z5.VnH(), 0);
14720 __ Fcmla(z5.VnH(), p3.Merging(), z5.VnH(), z4.VnH(), z3.VnH(), 0);
14721 __ Fcmla(z5.VnH(), p3.Merging(), z5.VnH(), z4.VnH(), z3.VnH(), 90);
14726 __ Ext(z5.VnB(), z5.VnB(), z5.VnB(), 4);
14737 // computed earlier in z5.
14787 ASSERT_EQUAL_SVE(z5.VnH(), z6.VnH());
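
Each Fcmla issues half of a complex multiply-accumulate; the rot 0 plus rot 90 pair at 14720-14721 accumulates a full complex product. A reference-model sketch of one step, with predication omitted (FcmlaStep is an illustrative name):

    #include <cstddef>
    #include <vector>

    // Even lanes are real, odd lanes imaginary.
    void FcmlaStep(std::vector<float>* acc, const std::vector<float>& n,
                   const std::vector<float>& m, int rot) {
      for (std::size_t i = 0; i < acc->size(); i += 2) {
        float re = n[i], im = n[i + 1];
        switch (rot) {
          case 0:
            (*acc)[i] += re * m[i];
            (*acc)[i + 1] += re * m[i + 1];
            break;
          case 90:
            (*acc)[i] -= im * m[i + 1];
            (*acc)[i + 1] += im * m[i];
            break;
          case 180:
            (*acc)[i] -= re * m[i];
            (*acc)[i + 1] -= re * m[i + 1];
            break;
          case 270:
            (*acc)[i] += im * m[i + 1];
            (*acc)[i + 1] -= im * m[i];
            break;
        }
      }
    }
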
14827 __ Fmul(z5.VnH(), z1.VnH(), z0.VnH(), 7);
14878 ASSERT_EQUAL_SVE(z15.VnH(), z5.VnH());
14915 InsrHelper(&masm, z5.VnD(), in_d1);
14932 __ Ftmad(z12.VnD(), z12.VnD(), z5.VnD(), 0);
14934 __ Ftmad(z13.VnD(), z13.VnD(), z5.VnD(), 5);
14936 __ Ftmad(z14.VnD(), z14.VnD(), z5.VnD(), 7);
15008 masm->Mov(z5, z0);
15009 masm->Fabd(z5.WithLaneSize(ls),
15011 z5.WithLaneSize(ls),
15067 ASSERT_EQUAL_SVE(expected_z5, z5.VnD());
15100 ASSERT_EQUAL_SVE(expected_z5, z5.VnD());
15133 ASSERT_EQUAL_SVE(expected_z5, z5.VnD());
15164 __ Mov(z5, z4);
15165 __ Fmul(z5.VnH(), p0m, z5.VnH(), 2.0);
15170 __ Mov(z8, z5);
15172 __ Mov(z9, z5);
15231 ASSERT_EQUAL_SVE(expected_z5, z5.VnD());
15301 __ Mov(z5, z0);
15302 __ Fscale(z5.VnH(), p1.Merging(), z5.VnH(), z3.VnH());
15329 ASSERT_EQUAL_SVE(expected_z5, z5.VnD());
16324 __ Mov(z5, z0);
16331 __ Ext(z5, z5, z0, 47);
16369 ASSERT_EQUAL_SVE(z5, z0);
16372 ASSERT_EQUAL_SVE(z5_expected, z5.VnD());
16375 ASSERT_EQUAL_SVE(z5_expected, z5.VnD());
16505 __ Fdup(z5.VnD(), Float16(1));
16506 __ Fscale(z5.VnD(), p0.Merging(), z5.VnD(), z0.VnD());
16507 __ Insr(z5.VnD(), 0);
16508 __ Frsqrte(z6.VnD(), z5.VnD());
16509 __ Frecpe(z5.VnD(), z5.VnD());
16525 ASSERT_EQUAL_SVE(z5_expected, z5.VnD());
16554 __ Fdup(z5.VnD(), Float16(1));
16555 __ Fscale(z5.VnD(), p0.Merging(), z5.VnD(), z0.VnD());
16557 __ Insr(z5.VnD(), 0);
16558 __ Frsqrts(z6.VnD(), z5.VnD(), z0.VnD());
16559 __ Frecps(z5.VnD(), z5.VnD(), z0.VnD());
16575 ASSERT_EQUAL_SVE(z5_expected, z5.VnD());
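
FRECPS and FRSQRTS at 16558-16559 are the Newton-Raphson step instructions that refine the FRECPE/FRSQRTE estimates at 16508-16509. Their core arithmetic, ignoring the NaN/infinity special cases the hardware defines:

    // Correction factors for one refinement iteration.
    double Frecps(double a, double b)  { return 2.0 - a * b; }
    double Frsqrts(double a, double b) { return (3.0 - a * b) / 2.0; }

One refinement of x ≈ 1/a is x' = x * Frecps(a, x), and of x ≈ 1/sqrt(a) is x' = x * Frsqrts(a, x*x).
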
16600 __ Ftsmul(z5.VnS(), z0.VnS(), z1.VnS());
16620 ASSERT_EQUAL_SVE(z5_expected, z5.VnD());
16965 __ Mov(z5, z2);
16966 (masm.*macro_idx)(z5.VnH(), z5.VnH(), z1.VnH(), z0.VnH(), 4); // zd == za
17040 ASSERT_EQUAL_SVE(z17.VnH(), z5.VnH());
18437 __ Adr(z5.VnD(), SVEMemOperand(z0.VnD(), z1.VnD(), LSL, 2));
18475 ASSERT_EQUAL_SVE(expected_z5, z5.VnD());
18724 __ Index(z5.VnS(), 3, 6);
18726 __ Uaba(z5.VnS(), z5.VnS(), z5.VnS(), z6.VnS());
18756 ASSERT_EQUAL_SVE(z5, z6);
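
UABA accumulates an unsigned absolute difference, so the line at 18726 computes z5 + |z5 - z6|, which equals z6 whenever z6 >= z5 lane-wise; that is what the assert at 18756 relies on. The lane rule (UabaS is an illustrative name):

    #include <cstdint>

    // zda += |zn - zm|, element-wise and unsigned.
    uint32_t UabaS(uint32_t za, uint32_t zn, uint32_t zm) {
      return za + ((zn > zm) ? (zn - zm) : (zm - zn));
    }
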
18822 __ Sqdmullb(z5.VnD(), z8.VnS(), z7.VnS(), 2);
18834 ASSERT_EQUAL_SVE(sqdmullb_idx_expected_d, z5.VnD());
18956 __ Ld1w(z5.VnD(), p3.Zeroing(), SVEMemOperand(x1, z30.VnD()));
18980 ASSERT_EQUAL_SVE(z4, z5);
19034 __ Sel(z5.VnB(), p5.Merging(), z1.VnB(), z0.VnB());
19057 ASSERT_EQUAL_SVE(z4, z5);
19385 InsrHelper(&masm, z5.VnS(), za_inputs_s);
19387 __ Sqrdmlah(z5.VnS(), z5.VnS(), z3.VnS(), z4.VnS());
19400 ASSERT_EQUAL_SVE(zd_expected_s, z5.VnS());
19437 __ Cmla(z5.VnS(), z28.VnS(), z31.VnS(), z30.VnS(), 90);
19452 ASSERT_EQUAL_SVE(z5, z1);
19571 __ Fmlalt(z5.VnS(), z29.VnS(), z31.VnH(), z30.VnH());
19586 ASSERT_EQUAL_SVE(z5, z1);
19662 __ Ummla(z5.VnS(), z5.VnS(), z0.VnB(), z1.VnB());
19673 ASSERT_EQUAL_SVE(z5, z4);
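
UMMLA at 19662 is a widening matrix multiply-accumulate: per 128-bit segment, a 2x2 block of 32-bit accumulators gains the product of two 2x8 byte matrices, the second transposed. A hedged reference-model sketch for one segment (UmmlaSegment is an illustrative name):

    #include <cstdint>

    // acc (2x2, uint32) += a (2x8, uint8) * transpose(b) (8x2).
    void UmmlaSegment(uint32_t acc[2][2], const uint8_t a[2][8],
                      const uint8_t b[2][8]) {
      for (int i = 0; i < 2; i++) {
        for (int j = 0; j < 2; j++) {
          for (int k = 0; k < 8; k++) acc[i][j] += a[i][k] * b[j][k];
        }
      }
    }
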
19695 __ Usdot(z5.VnS(), z0.VnS(), z1.VnB(), z2.VnB());
19720 ASSERT_EQUAL_SVE(z6, z5);
19997 __ Mov(z5.VnD(), 0);
19999 __ Fmmla(z5.VnD(), z5.VnD(), z4.VnD(), z3.VnD());
20010 ASSERT_EQUAL_SVE(z4, z5);
20110 __ Ld1row(z5.VnS(), p2.Zeroing(), SVEMemOperand(x0, x1, LSL, 2));
20151 ASSERT_EQUAL_SVE(z4, z5);