Lines matching refs: z6

854 __ Clastb(z6.VnB(), p3, z6.VnB(), z0.VnB());
903 ASSERT_EQUAL_SVE(z6_expected, z6.VnD());
950 __ Compact(z6.VnD(), p4, z3.VnD());
967 ASSERT_EQUAL_SVE(z6_expected, z6.VnD());
1002 __ Index(z6.VnB(), -1, -1);
1003 __ Splice(z6.VnB(), p6, z6.VnB(), z30.VnB());
1059 ASSERT_EQUAL_SVE(z6_expected, z6.VnD());
1363 InsrHelper(&masm, z6.VnS(), z22_inputs);
1368 __ Eor(z6.VnS(), z6.VnS(), 0xff0000ff);
1418 ASSERT_EQUAL_SVE(z6_expected, z6.VnS());
1461 __ Dup(z6.VnH(), 0x7f00);
1491 ASSERT_EQUAL_SVE(0x7f00, z6.VnH());
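The Dup/check pair above (source lines 1461 and 1491) broadcasts an immediate into every 16-bit lane of z6 and then compares each lane against that immediate. A minimal sketch of the same pattern using only the public VIXL MacroAssembler API; the function name is illustrative and the include path is assumed to match upstream VIXL.

  #include "aarch64/macro-assembler-aarch64.h"

  using namespace vixl::aarch64;

  // Broadcast 0x7f00 into every H (16-bit) lane of z6. After execution, each
  // lane read back via z6.VnH() equals 0x7f00, which is what
  // ASSERT_EQUAL_SVE(0x7f00, z6.VnH()) checks lane by lane.
  // Assumes `masm` was constructed with SVE enabled in its CPUFeatures.
  void EmitDupImmediate(MacroAssembler* masm) {
    masm->Dup(z6.VnH(), 0x7f00);
  }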
1819 __ Mov(z6, z2);
1829 __ Incp(z6.VnS(), p0);
1862 ASSERT_EQUAL_SVE(z6_expected, z6.VnS());
1967 __ Mov(z6, z2);
1977 __ Sqincp(z6.VnS(), p0);
2010 ASSERT_EQUAL_SVE(z6_expected, z6.VnS());
2115 __ Mov(z6, z2);
2125 __ Uqincp(z6.VnS(), p0);
2166 ASSERT_EQUAL_SVE(z6_expected, z6.VnS());
2252 __ Index(z6.VnS(), 42, 42);
2308 ASSERT_EQUAL_SVE_LANE((42 + (42 * i)) & s_mask, z6.VnS(), i);
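Source lines 2252 and 2308 pair Index with a per-lane check: INDEX writes start + step * i into lane i, truncated to the lane width (hence the & s_mask in the assertion). A scalar reference for the expected 32-bit lane values; the helper name is illustrative.

  #include <cstdint>
  #include <vector>

  // Expected contents of z6.VnS() after `Index(z6.VnS(), 42, 42)`:
  // lane i holds (42 + 42 * i) reduced modulo 2^32.
  std::vector<uint32_t> ExpectedIndexLanes(int lane_count) {
    std::vector<uint32_t> lanes(lane_count);
    for (int i = 0; i < lane_count; i++) {
      lanes[i] = static_cast<uint32_t>(42 + (42 * i));
    }
    return lanes;
  }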
4409 ASSERT_EQUAL_SVE(z6, z22);
5618 __ Rev(z6.VnH(), z9.VnH());
5662 core.zreg_lane(z6.GetCode(), kHRegSize, lane_count - i - 1);
5909 __ Mov(z6, z29);
5910 __ Not(z6.VnS(), pg, z31.VnS());
5964 ASSERT_EQUAL_SVE(expected_z6, z6.VnD());
6448 __ Cpy(z6.VnS(), pg, s30);
6482 ASSERT_EQUAL_SVE(expected_s, z6.VnD());
6541 __ Cpy(z6.VnD(), pg.Merging(), -1);
6591 ASSERT_EQUAL_SVE(expected_z6, z6.VnD());
6647 __ Fcpy(z6.VnS(), pg.Merging(), 6.0);
6703 ASSERT_EQUAL_SVE(expected_z6, z6.VnD());
6861 __ Index(z6.VnB(), 6, 2);
6874 __ Str(z6, SVEMemOperand(x0, 6 * vl));
6906 middle[(6 * vl) + i] = (6 + (2 * i)) & 0xff; // z6
6919 ASSERT_EQUAL_SVE(z6, z16);
7075 __ Index(z6.VnS(), -7, 3);
7079 __ St1w(z6.VnS(), p6, SVEMemOperand(x0, 42, SVE_MUL_VL));
7126 __ Mov(z23.VnS(), p6.Merging(), z6.VnS());
7201 // st1w { z6.s }, SVE_ALL
7294 __ Dup(z6.VnH(), 0);
7296 __ Mov(z6.VnH(), p1.Merging(), z16.VnH());
7390 ASSERT_EQUAL_SVE(z6, z22);
7470 __ Dup(z6.VnH(), 0);
7472 __ Mov(z6.VnH(), p6.Merging(), z12.VnH());
7562 ASSERT_EQUAL_SVE(z6, z22);
7612 __ Dup(z6.VnB(), 0);
7615 __ Mov(z6.VnB(), p0.Merging(), z12.VnB());
7763 ASSERT_EQUAL_SVE(z6, z18);
7819 __ Dup(z6.VnB(), 0);
7822 __ Mov(z6.VnB(), p7.Merging(), z12.VnB());
7976 ASSERT_EQUAL_SVE(z6, z18);
8035 __ Dup(z6.VnB(), 0);
8039 __ Mov(z6.VnB(), p0.Merging(), z13.VnB());
8224 ASSERT_EQUAL_SVE(z6, z22);
8291 __ Dup(z6.VnB(), 0);
8295 __ Mov(z6.VnB(), p7.Merging(), z22.VnB());
8485 ASSERT_EQUAL_SVE(z6, z22);
8529 __ Index(z6.VnH(), 44, 3);
8533 __ St3h(z4.VnH(), z5.VnH(), z6.VnH(), p1, SVEMemOperand(sp, x1, LSL, 1));
8563 __ Mov(z18.VnH(), p1.Merging(), z6.VnH());
8566 z6.VnH(),
8604 ASSERT_EQUAL_SVE(z6, z18);
8634 __ Index(z6.VnH(), 44, 3);
8636 __ St3h(z4.VnH(), z5.VnH(), z6.VnH(), p1, SVEMemOperand(sp, 6, SVE_MUL_VL));
8659 __ Mov(z18.VnH(), p1.Merging(), z6.VnH());
8662 z6.VnH(),
9419 __ Ldnf1sb(z6.VnH(), p0.Zeroing(), SVEMemOperand(x0));
9422 __ Sel(z6.VnH(), p1, z6.VnH(), z10.VnH());
9435 ASSERT_EQUAL_SVE(z26, z6);
9485 __ Ldff1h(z6.VnS(), all.Zeroing(), SVEMemOperand(x0, z31.VnS(), UXTW, 1));
9577 ASSERT_EQUAL_SVE(expected_z6, z6.VnS());
9679 __ Ld1h(z6.VnS(), all.Zeroing(), SVEMemOperand(x0, z31.VnS(), UXTW, 1));
9757 ASSERT_EQUAL_SVE(expected_z6, z6.VnS());
10366 __ Ld1d(z6.VnD(), p4.Zeroing(), SVEMemOperand(x0, x1, LSL, 3));
10387 ASSERT_EQUAL_SVE(z6, z7);
10432 __ Ld1b(z6.VnB(), p0.Zeroing(), SVEMemOperand(x0, x1, LSL, 2));
10447 ASSERT_EQUAL_SVE(z6, z7);
10516 __ Ld1rqh(z6.VnH(), p2.Zeroing(), SVEMemOperand(x1, -32));
10536 ASSERT_EQUAL_SVE(z1, z6);
10632 ASSERT_EQUAL_SVE(z7, z6);
11215 __ Index(z6.VnH(), 0x30f0, -1);
11216 __ Sub(z6.VnH(), 0x7f00, z6.VnH());
11251 ASSERT_EQUAL_SVE(expected_z6, z6.VnH());
11272 __ Fdup(z6.VnH(), 1.0);
11295 ASSERT_EQUAL_SVE(0x3c00, z6.VnH());
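Source lines 11272 and 11295 rely on the IEEE 754 binary16 encoding of 1.0: sign 0, biased exponent 15 (bias 15, so exponent field 0b01111), mantissa 0, giving 0x3c00; that is why Fdup(z6.VnH(), 1.0) is checked with ASSERT_EQUAL_SVE(0x3c00, z6.VnH()). A small sketch that assembles the same bit pattern from its fields; the function is illustrative.

  #include <cstdint>

  // Assemble a binary16 value from its fields. For 1.0 this gives
  // (0 << 15) | (15 << 10) | 0 == 0x3c00.
  uint16_t MakeFloat16(uint16_t sign, uint16_t biased_exp, uint16_t mantissa) {
    return static_cast<uint16_t>((sign << 15) | (biased_exp << 10) | mantissa);
  }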
11385 ASSERT_EQUAL_SVE_LANE(0, z6.VnD(), i);
11458 ASSERT_EQUAL_SVE_LANE(0, z6.VnD(), i);
11531 ASSERT_EQUAL_SVE_LANE(0, z6.VnD(), i);
11603 ASSERT_EQUAL_SVE_LANE(0, z6.VnD(), i);
11658 ZRegister da_result = z6.WithLaneSize(lane_size_in_bits);
11992 __ Neg(z6.VnB(), p0.Merging(), z0.VnB());
11993 __ Sdot(z7.VnS(), z7.VnS(), z1.VnB(), z6.VnB(), 0);
11995 __ Sdot(z8.VnS(), z8.VnS(), z1.VnB(), z6.VnB(), 1);
11998 __ Sdot(z9.VnS(), z9.VnS(), z1.VnB(), z6.VnB(), 2);
12001 __ Sdot(z10.VnS(), z10.VnS(), z1.VnB(), z6.VnB(), 3);
12998 __ Dup(z6.VnB(), 0x55);
13000 __ Lsl(z6.VnH(), p3.Merging(), z0.VnH(), z1.VnH());
13035 ASSERT_EQUAL_SVE(expected_z6, z6.VnD());
13072 __ Dup(z6.VnB(), 0x55);
13074 __ Lsl(z6.VnH(), p3.Merging(), z6.VnH(), z1.VnD());
13095 ASSERT_EQUAL_SVE(expected_z6, z6.VnD());
13129 __ Lsr(z6.VnS(), p0.Merging(), z31.VnS(), 3);
13130 __ Mov(z7, z6);
13155 ASSERT_EQUAL_SVE(expected_z6, z6.VnD());
13191 __ Asrd(z6.VnH(), p0.Merging(), z31.VnH(), 15);
13224 ASSERT_EQUAL_SVE(expected_z6, z6.VnD());
13607 __ Dup(z6.VnB(), 0x42);
13608 __ Rbit(z6.VnS(), p2.Merging(), z0.VnS());
13628 ASSERT_EQUAL_SVE(expected_z6, z6.VnD());
13648 __ Revw(z6.VnD(), p1.Merging(), z0.VnD());
13671 ASSERT_EQUAL_SVE(expected_z6, z6.VnD());
13723 __ Fexpa(z6.VnD(), z0.VnD());
13735 ASSERT_EQUAL_SVE(expected_z6, z6.VnD());
14413 __ Trn1(z6.VnS(), z0.VnS(), z1.VnS());
14431 ASSERT_EQUAL_SVE(expected_z6, z6.VnD());
14454 __ Zip1(z6.VnS(), z0.VnS(), z1.VnS());
14463 __ Uzp1(z14.VnS(), z6.VnS(), z7.VnS());
14464 __ Uzp2(z15.VnS(), z6.VnS(), z7.VnS());
14481 ASSERT_EQUAL_SVE(expected_z6, z6.VnD());
14540 __ Fsub(z6.VnH(), z0.VnH(), z1.VnH());
14599 ASSERT_EQUAL_SVE(z6.VnH(), z4.VnH());
14600 ASSERT_EQUAL_SVE(z6.VnH(), z5.VnH());
14650 __ Dup(z6.VnH(), 0);
14651 __ Fcmla(z6.VnH(), p0.Merging(), z6.VnH(), z0.VnH(), z2.VnH(), 0);
14652 __ Fcmla(z6.VnH(), p0.Merging(), z6.VnH(), z0.VnH(), z2.VnH(), 90);
14682 ASSERT_EQUAL_SVE(z6.VnH(), z4.VnH());
14683 ASSERT_EQUAL_SVE(z6.VnH(), z5.VnH());
14732 __ Dup(z6.VnH(), 0);
14733 __ Fcmla(z6.VnH(), p2.Merging(), z6.VnH(), z4.VnH(), z3.VnH(), 180);
14734 __ Fcmla(z6.VnH(), p2.Merging(), z6.VnH(), z4.VnH(), z3.VnH(), 270);
14736 // Negate the even results. The results in z6 should now match the results
14740 __ Fneg(z6.VnH(), p2.Merging(), z6.VnH());
14787 ASSERT_EQUAL_SVE(z5.VnH(), z6.VnH());
14829 __ Fmul(z6.VnS(), z1.VnS(), z0.VnS(), 0);
14879 ASSERT_EQUAL_SVE(z16.VnS(), z6.VnS());
14917 __ Mov(z6, z0);
14918 __ Ftmad(z6.VnH(), z6.VnH(), z1.VnH(), 0);
14945 ASSERT_EQUAL_SVE(expected_z6, z6.VnD());
15014 masm->Mov(z6, z0);
15015 masm->Fmul(z6.WithLaneSize(ls),
15017 z6.WithLaneSize(ls),
15069 ASSERT_EQUAL_SVE(expected_z6, z6.VnD());
15102 ASSERT_EQUAL_SVE(expected_z6, z6.VnD());
15135 ASSERT_EQUAL_SVE(expected_z6, z6.VnD());
15166 __ Mov(z6, z1);
15167 __ Fminnm(z6.VnH(), p0m, z6.VnH(), 0.0);
15233 ASSERT_EQUAL_SVE(expected_z6, z6.VnD());
15305 __ Mov(z6, z1);
15306 __ Fscale(z6.VnS(), p0.Merging(), z6.VnS(), z3.VnS());
15332 ASSERT_EQUAL_SVE(expected_z6, z6.VnD());
16325 __ Mov(z6, z0);
16332 __ Ext(z6, z6, z0, 255);
16379 ASSERT_EQUAL_SVE(z6, z0);
16382 ASSERT_EQUAL_SVE(z6_expected, z6.VnD());
16508 __ Frsqrte(z6.VnD(), z5.VnD());
16527 ASSERT_EQUAL_SVE(z6_expected, z6.VnD());
16558 __ Frsqrts(z6.VnD(), z5.VnD(), z0.VnD());
16577 ASSERT_EQUAL_SVE(z6_expected, z6.VnD());
16601 __ Ftsmul(z6.VnS(), z2.VnS(), z1.VnS());
16622 ASSERT_EQUAL_SVE(z6_expected, z6.VnD());
16967 (masm.*macro_idx)(z6.VnH(), z2.VnH(), z1.VnH(), z0.VnH(), 7);
17041 ASSERT_EQUAL_SVE(z18.VnH(), z6.VnH());
18438 __ Adr(z6.VnD(), SVEMemOperand(z0.VnD(), z1.VnD(), LSL, 3));
18476 ASSERT_EQUAL_SVE(expected_z6, z6.VnD());
18642 __ Prfw(PLDL2STRM, p1, SVEMemOperand(x2, z6.VnS(), SXTW, 2));
18725 __ Index(z6.VnS(), 5, 6);
18726 __ Uaba(z5.VnS(), z5.VnS(), z5.VnS(), z6.VnS());
18756 ASSERT_EQUAL_SVE(z5, z6);
18823 __ Sqdmullt(z6.VnD(), z8.VnS(), z7.VnS(), 0);
18835 ASSERT_EQUAL_SVE(sqdmullt_idx_expected_d, z6.VnD());
18959 __ Ldnt1d(z6.VnD(), p4.Zeroing(), SVEMemOperand(z30.VnD(), x1));
18981 ASSERT_EQUAL_SVE(z6, z7);
19040 __ Ld1b(z6.VnB(), p0.Zeroing(), SVEMemOperand(x0, x4));
19058 ASSERT_EQUAL_SVE(z6, z7);
19389 InsrHelper(&masm, z6.VnD(), zn_inputs_d);
19393 __ Sqrdmlah(z8.VnD(), z8.VnD(), z6.VnD(), z7.VnD());
19438 __ Cmla(z6.VnS(), z27.VnS(), z31.VnS(), z30.VnS(), 180);
19453 ASSERT_EQUAL_SVE(z6, z2);
19572 __ Fmlslb(z6.VnS(), z29.VnS(), z31.VnH(), z30.VnH());
19587 ASSERT_EQUAL_SVE(z6, z2);
19674 ASSERT_EQUAL_SVE(z7, z6);
19696 __ Udot(z6.VnS(), z0.VnS(), z1.VnB(), z2.VnB());
19720 ASSERT_EQUAL_SVE(z6, z5);
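Source line 19696 uses UDOT: each 32-bit lane of the destination takes the accumulator lane plus the sum of four unsigned 8-bit products formed from the corresponding byte quadruples of the two byte-sized sources. A scalar reference for a single lane, under that reading of the operands; names are illustrative.

  #include <cstdint>

  // One 32-bit lane of UDOT: acc + sum of four unsigned 8 x 8 -> 32 products.
  uint32_t UdotLane(uint32_t acc, const uint8_t zn[4], const uint8_t zm[4]) {
    for (int k = 0; k < 4; k++) {
      acc += static_cast<uint32_t>(zn[k]) * static_cast<uint32_t>(zm[k]);
    }
    return acc;
  }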
20113 __ Ld1rod(z6.VnD(), p2.Zeroing(), SVEMemOperand(x0, 128));
20136 __ Mov(z8, z6);
20138 __ Eor(z8.VnB(), z8.VnB(), z6.VnB());
20152 ASSERT_EQUAL_SVE(z6, z7);
20192 ASSERT_EQUAL_SVE(z6_expected, z6.VnD());
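Several groups in this listing (for example source lines 7294-7296, 7470-7472 and 8035-8039) build a reference value the same way: clear the register with Dup, then copy only the active lanes with a merging predicated Mov, so the later ASSERT_EQUAL_SVE comparison is insensitive to lanes the predicate left untouched. A minimal sketch of that reference construction, assuming the public VIXL MacroAssembler API; the function name is illustrative.

  #include "aarch64/macro-assembler-aarch64.h"

  using namespace vixl::aarch64;

  // Build a comparison reference in z6: inactive lanes become zero, active
  // lanes (under p1) take their values from z16.
  // Assumes `masm` was constructed with SVE enabled in its CPUFeatures.
  void EmitPredicatedReference(MacroAssembler* masm) {
    masm->Dup(z6.VnH(), 0);                        // clear all lanes
    masm->Mov(z6.VnH(), p1.Merging(), z16.VnH());  // copy only active lanes
  }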