Lines matching refs:z10 (each prefixed with its source line number)
310 __ Index(z10.VnB(), 0, -1);
361 ASSERT_EQUAL_SVE_LANE(0x00, z10.VnB(), i);
402 ZRegister mla_da_result = z10.WithLaneSize(lane_size_in_bits);
861 __ Clastb(z10.VnH(), p3, z10.VnH(), z0.VnH());
917 ASSERT_EQUAL_SVE(z10_expected_vl128, z10.VnD());
921 ASSERT_EQUAL_SVE(z10_expected_vl_long, z10.VnD());
1023 __ Splice(z10.VnS(), p3, z29.VnS(), z30.VnS());
1063 ASSERT_EQUAL_SVE(z10_expected, z10.VnD());
1138 InsrHelper(&masm, z10.VnB(), z10_inputs);
1142 __ Cmphs(p6.VnB(), p0.Zeroing(), z10.VnB(), z11.VnB());
1183 __ Cmpls(p12.VnB(), p0.Zeroing(), z11.VnB(), z10.VnB()); // HS
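
The Cmpls at line 1183 is the CMPHS alias with its source operands swapped, which is what the trailing // HS comment records. A minimal sketch of the per-lane unsigned compares (plain C++, not the VIXL API):

    // Per-lane semantics of the unsigned predicate compares above.
    bool cmphs(uint8_t a, uint8_t b) { return a >= b; }       // higher-or-same
    bool cmpls(uint8_t a, uint8_t b) { return cmphs(b, a); }  // lower-or-same: HS, swapped
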
1373 InsrHelper(&masm, z10.VnS(), z22_inputs);
1378 __ Orr(z10.VnS(), z10.VnS(), 0xff0000ff);
1430 ASSERT_EQUAL_SVE(z10_expected, z10.VnS());
1465 __ Dup(z10.VnD(), 0x3fc);
1493 ASSERT_EQUAL_SVE(0x00000000000003fc, z10.VnD());
1806 __ Decp(z10.VnD(), p0, z0.VnD());
1869 ASSERT_EQUAL_SVE(z0_expected, z10.VnD());
1954 __ Sqdecp(z10.VnD(), p0, z0.VnD());
2017 ASSERT_EQUAL_SVE(z0_expected, z10.VnD());
2102 __ Uqdecp(z10.VnD(), p0, z0.VnD());
2173 ASSERT_EQUAL_SVE(z0_expected, z10.VnD());
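
Lines 1806 through 2173 exercise the predicated decrement family. A sketch of the per-lane semantics, assuming 64-bit lanes and plain C++ in place of the simulator: each form subtracts the number of active predicate lanes, Decp wrapping, Sqdecp saturating as signed, Uqdecp saturating as unsigned.

    #include <cstdint>
    #include <limits>

    // Sqdecp: signed saturation at the bottom of the range (active >= 0,
    // so no saturation at the top is possible).
    int64_t sqdecp(int64_t v, int active) {
      if (v < std::numeric_limits<int64_t>::min() + active)
        return std::numeric_limits<int64_t>::min();
      return v - active;
    }

    // Uqdecp: unsigned saturation, clamped at zero.
    uint64_t uqdecp(uint64_t v, unsigned active) {
      return (v < active) ? 0 : v - active;
    }
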
2257 __ Index(z10.VnD(), x0, x1);
2313 ASSERT_EQUAL_SVE_LANE((42 - (3 * i)) & d_mask, z10.VnD(), i);
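
Lines 2257 and 2313 check Index against a closed form. A reference model of the instruction, assuming x0 and x1 hold 42 and -3, as the expected (42 - (3 * i)) & d_mask values suggest:

    // Index(zd, start, step): lane i gets start + i * step, truncated
    // to the lane width (d_mask here). Unsigned wrap-around gives the
    // right answer for negative steps.
    uint64_t index_lane(uint64_t start, uint64_t step, int i, uint64_t mask) {
      return (start + uint64_t(i) * step) & mask;
    }
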
4413 ASSERT_EQUAL_SVE(z10, z26);
5624 __ Dup(z10.VnB(), z9.VnB(), index[0]);
5688 ASSERT_EQUAL_SVE_LANE(expected_z10, z10.VnB(), i);
5746 __ Sunpkhi(z10.VnH(), z9.VnB());
5776 uint16_t expected = core.zreg_lane<uint16_t>(z10.GetCode(), i);
6549 __ Cpy(z10.VnH(), pg.Merging(), 0xff);
6611 ASSERT_EQUAL_SVE(expected_z10, z10.VnD());
6653 __ Fcpy(z10.VnS(), pg.Merging(), 0.0);
6727 ASSERT_EQUAL_SVE(expected_z10, z10.VnD());
6747 ASSERT_EQUAL_SVE(z10.VnD(), z14.VnD());
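
The Cpy/Fcpy tests around lines 6549 to 6747 rely on merging predication. A one-line sketch of the per-lane rule (hypothetical helper, plain C++):

    // Merging form: active lanes take the immediate, inactive lanes
    // keep their previous contents.
    uint16_t cpy_merging(uint16_t old_value, bool active, uint16_t imm) {
      return active ? imm : old_value;
    }
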
7088 __ Ld1h(z10.VnS(), p3.Zeroing(), SVEMemOperand(x0, -8, SVE_MUL_VL));
7112 // Ld1h(z10.VnS(), ...)
7219 ASSERT_EQUAL_SVE(z20, z10);
7306 __ Dup(z10.VnD(), 0);
7308 __ Mov(z10.VnD(), p3.Merging(), z18.VnD());
7398 ASSERT_EQUAL_SVE(z10, z26);
7421 __ Index(z10.VnB(), -4, 11);
7425 __ St2b(z10.VnB(), z11.VnB(), p7, SVEMemOperand(x0, x1));
7466 __ Mov(z4.VnB(), p7.Merging(), z10.VnB());
7482 __ Dup(z10.VnD(), 0);
7484 __ Mov(z10.VnD(), p4.Merging(), z31.VnD());
7512 // st2b { z10.b, z11.b }, SVE_MUL4
7570 ASSERT_EQUAL_SVE(z10, z26);
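
The St2b test at lines 7421 to 7570 checks the interleaved store layout; the St3b/St4b tests below it generalize the same pattern with strides of 3 and 4. A reference model for the two-register case, assuming byte lanes and a fixed per-lane stride:

    #include <cstdint>

    // St2b {za.b, zb.b}, pg, [base]: element i of each register lands at
    // base + 2*i and base + 2*i + 1; inactive lanes are simply not written,
    // but the addressing still advances by lane index.
    void st2b(uint8_t* base, const uint8_t* za, const uint8_t* zb,
              const bool* pg, int lanes) {
      for (int i = 0; i < lanes; i++) {
        if (pg[i]) {
          base[2 * i] = za[i];
          base[2 * i + 1] = zb[i];
        }
      }
    }
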
7604 __ Index(z10.VnB(), 1, -3);
7608 __ St3b(z10.VnB(), z11.VnB(), z12.VnB(), p0, SVEMemOperand(x0));
7613 __ Mov(z4.VnB(), p0.Merging(), z10.VnB());
7641 __ Dup(z10.VnS(), 0);
7644 __ Mov(z10.VnS(), p2.Merging(), z30.VnS());
7709 // st3b { z10.b, z11.b, z12.b }, SVE_ALL
7771 ASSERT_EQUAL_SVE(z10, z22);
7810 __ Index(z10.VnB(), -4, 11);
7815 __ St3b(z10.VnB(), z11.VnB(), z12.VnB(), p7, SVEMemOperand(x0, x1, LSL, 0));
7820 __ Mov(z4.VnB(), p7.Merging(), z10.VnB());
7853 __ Dup(z10.VnS(), 0);
7856 __ Mov(z10.VnS(), p5.Merging(), z30.VnS());
7919 // st3b { z10.b, z11.b, z12.b }, SVE_MUL4
7984 ASSERT_EQUAL_SVE(z10, z22);
8025 __ Index(z10.VnB(), 1, -7);
8030 __ St4b(z10.VnB(), z11.VnB(), z12.VnB(), z13.VnB(), p0, SVEMemOperand(x0));
8036 __ Mov(z3.VnB(), p0.Merging(), z10.VnB());
8057 __ Dup(z10.VnH(), 0);
8061 __ Mov(z10.VnH(), p1.Merging(), z2.VnH());
8161 // st4b { z10.b, z11.b, z12.b, z13.b }, SVE_ALL
8230 ASSERT_EQUAL_SVE(z10, z26);
8313 __ Dup(z10.VnH(), 0);
8317 __ Mov(z10.VnH(), p6.Merging(), z26.VnH());
8491 ASSERT_EQUAL_SVE(z10, z26);
8537 __ Index(z10.VnS(), 44, 4);
8544 z10.VnS(),
8576 __ Mov(z22.VnS(), p2.Merging(), z10.VnS());
8580 z10.VnS(),
8609 ASSERT_EQUAL_SVE(z10, z22);
8640 __ Index(z10.VnS(), 44, 4);
8643 __ St4w(z8.VnS(), z9.VnS(), z10.VnS(), z11.VnS(), p2, SVEMemOperand(sp));
8672 __ Mov(z22.VnS(), p2.Merging(), z10.VnS());
8676 z10.VnS(),
9363 __ Dup(z10.VnB(), 0);
9375 __ Sel(z0.VnB(), p1, z0.VnB(), z10.VnB());
9384 __ Sel(z1.VnH(), p1, z1.VnH(), z10.VnH());
9391 __ Sel(z2.VnS(), p1, z2.VnS(), z10.VnS());
9398 __ Sel(z3.VnD(), p1, z3.VnD(), z10.VnD());
9406 __ Sel(z4.VnB(), p1, z4.VnB(), z10.VnB());
9414 __ Sel(z5.VnS(), p1, z5.VnS(), z10.VnS());
9422 __ Sel(z6.VnH(), p1, z6.VnH(), z10.VnH());
9501 __ Ldff1h(z10.VnD(), all.Zeroing(), SVEMemOperand(x0, z30.VnD(), SXTW));
9590 ASSERT_EQUAL_SVE(expected_z10, z10.VnD());
9695 __ Ld1h(z10.VnD(), all.Zeroing(), SVEMemOperand(x0, z30.VnD(), SXTW));
9770 ASSERT_EQUAL_SVE(expected_z10, z10.VnD());
10372 __ Ld1h(z10.VnH(), p2.Zeroing(), SVEMemOperand(x0, -1, SVE_MUL_VL));
10389 ASSERT_EQUAL_SVE(z10, z11);
10634 ASSERT_EQUAL_SVE(z11, z10);
10657 ZRegister zn_ld_b = z10.WithLaneSize(esize_in_bits);
11277 __ Fdup(z10.VnH(), Float16(0.0));
11299 ASSERT_EQUAL_SVE(0x0000, z10.VnH());
11340 __ Mov(z10, z31);
11341 __ Orv(s10, p0, z10.VnS()); // destructive
11389 ASSERT_EQUAL_SVE_LANE(0, z10.VnD(), i);
11989 __ Dup(z10.VnS(), 0);
12001 __ Sdot(z10.VnS(), z10.VnS(), z1.VnB(), z6.VnB(), 3);
12002 __ Mul(z10.VnS(), z10.VnS(), 8);
12024 ASSERT_EQUAL_SVE(z7.VnS(), z10.VnS());
12064 __ Neg(z10.VnH(), p4.Merging(), z0.VnH());
12066 __ Sdot(z12.VnD(), z12.VnD(), z1.VnH(), z10.VnH(), 0);
12068 __ Sdot(z13.VnD(), z13.VnD(), z1.VnH(), z10.VnH(), 1);
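
Lines 11989 to 12068 test the indexed Sdot forms. A reference model for one 32-bit accumulator lane of the plain dot product; the indexed variants differ only in taking their four source elements (bytes for the .S form, halfwords for the .D form) from one selected element per 128-bit segment of zm:

    #include <cstdint>

    // zd.s[i] += sum of four signed byte products.
    int32_t sdot_lane(int32_t acc, const int8_t zn[4], const int8_t zm[4]) {
      for (int j = 0; j < 4; j++) {
        acc += int32_t(zn[j]) * int32_t(zm[j]);
      }
      return acc;
    }
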
13004 __ Dup(z10.VnB(), 0x55);
13007 __ Asr(z10.VnS(), p4.Merging(), z31.VnS(), z1.VnS());
13043 ASSERT_EQUAL_SVE(expected_z10, z10.VnD());
13077 __ Dup(z10.VnB(), 0x55);
13080 __ Asr(z10.VnS(), p4.Merging(), z31.VnS(), z1.VnD());
13103 ASSERT_EQUAL_SVE(expected_z10, z10.VnD());
13135 __ Mov(z10, z9);
13136 __ Lsl(z10.VnD(), p5.Merging(), z10.VnD(), 4);
13137 __ Asr(z11.VnD(), p0.Merging(), z10.VnD(), 5);
13163 ASSERT_EQUAL_SVE(expected_z10, z10.VnD());
13198 __ Asrd(z10.VnS(), p0.Merging(), z31.VnS(), 31);
13232 ASSERT_EQUAL_SVE(expected_z10, z10.VnD());
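
The Asrd at line 13198 shifts by 31 with divide semantics. Unlike Asr, it rounds toward zero; a sketch for one signed 32-bit lane:

    #include <cstdint>

    // Asrd: signed divide by 2^shift, rounding toward zero. Negative
    // inputs get a (2^shift - 1) bias before the arithmetic shift.
    int32_t asrd(int32_t x, int shift) {
      int64_t v = x;
      if (v < 0) v += (int64_t(1) << shift) - 1;
      return int32_t(v >> shift);
    }
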
13727 __ Fexpa(z10.VnS(), z4.VnS());
13743 ASSERT_EQUAL_SVE(expected_z10, z10.VnD());
14459 __ Uzp1(z10.VnB(), z2.VnB(), z3.VnB());
14490 ASSERT_EQUAL_SVE(z0.VnD(), z10.VnD());
14571 __ Fsub(z10.VnS(), z0.VnS(), z1.VnS());
14602 ASSERT_EQUAL_SVE(z10.VnS(), z8.VnS());
14603 ASSERT_EQUAL_SVE(z10.VnS(), z9.VnS());
14779 __ Dup(z10.VnD(), 0);
14780 __ Fcmla(z10.VnD(), p2.Merging(), z10.VnD(), z4.VnD(), z3.VnD(), 180);
14781 __ Fcmla(z10.VnD(), p2.Merging(), z10.VnD(), z4.VnD(), z3.VnD(), 270);
14782 __ Fneg(z10.VnD(), p2.Merging(), z10.VnD());
14789 ASSERT_EQUAL_SVE(z9.VnD(), z10.VnD());
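
Lines 14779 to 14789 verify a rotation identity: starting from a zeroed accumulator, the 180 and 270 rotations accumulate the negation of what the 0 and 90 rotations would (a full complex multiply-accumulate), so the trailing Fneg should make z10 match z9. A sketch of what the 0/90 pair computes per complex lane:

    #include <complex>

    // The 0 and 90 rotations together accumulate acc + a * b; the
    // 180/270 pair yields acc - a * b, which Fneg undoes here.
    std::complex<double> fcmla_0_90(std::complex<double> acc,
                                    std::complex<double> a,
                                    std::complex<double> b) {
      return acc + a * b;
    }
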
14834 __ Fmul(z10.VnD(), z1.VnD(), z0.VnD(), 0);
14883 ASSERT_EQUAL_SVE(z20.VnD(), z10.VnD());
14926 __ Mov(z10, z2);
14927 __ Ftmad(z10.VnS(), z10.VnS(), z3.VnS(), 3);
14961 ASSERT_EQUAL_SVE(expected_z10, z10.VnD());
15038 masm->Mov(z10, z8);
15039 masm->Fmaxnm(z10.WithLaneSize(ls),
15041 z10.WithLaneSize(ls),
15075 ASSERT_EQUAL_SVE(expected_z10, z10.VnD());
15108 ASSERT_EQUAL_SVE(expected_z10, z10.VnD());
15141 ASSERT_EQUAL_SVE(expected_z10, z10.VnD());
15317 __ Dup(z10.VnD(), 2045);
15319 __ Fscale(z11.VnD(), p0.Merging(), z11.VnD(), z10.VnD());
16416 __ Dup(z10.VnH(), 0);
16417 __ Fdiv(z10.VnH(), p0.Merging(), z10.VnH(), z10.VnH());
16418 __ Insr(z10.VnH(), 0x5140);
16419 __ Insr(z10.VnH(), 0xd140);
16420 __ Ext(z10.VnB(), z10.VnB(), z10.VnB(), 2);
16421 __ Fmaxnmv(h11, p0, z10.VnH());
16422 __ Fmaxnmv(h12, p4, z10.VnH());
16423 __ Fminnmv(h13, p0, z10.VnH());
16424 __ Fminnmv(h14, p4, z10.VnH());
16426 __ Dup(z10.VnS(), 0);
16427 __ Fdiv(z10.VnS(), p0.Merging(), z10.VnS(), z10.VnS());
16428 __ Insr(z10.VnS(), 0x42280000);
16429 __ Insr(z10.VnS(), 0xc2280000);
16430 __ Ext(z10.VnB(), z10.VnB(), z10.VnB(), 4);
16431 __ Fmaxnmv(s15, p0, z10.VnS());
16432 __ Fmaxnmv(s16, p2, z10.VnS());
16433 __ Fminnmv(s17, p0, z10.VnS());
16434 __ Fminnmv(s18, p2, z10.VnS());
16436 __ Dup(z10.VnD(), 0);
16437 __ Fdiv(z10.VnD(), p0.Merging(), z10.VnD(), z10.VnD());
16438 __ Insr(z10.VnD(), 0x4045000000000000);
16439 __ Insr(z10.VnD(), 0xc045000000000000);
16440 __ Ext(z10.VnB(), z10.VnB(), z10.VnB(), 8);
16441 __ Fmaxnmv(d19, p0, z10.VnD());
16442 __ Fmaxnmv(d20, p3, z10.VnD());
16443 __ Fminnmv(d21, p0, z10.VnD());
16444 __ Fminnmv(d22, p3, z10.VnD());
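
The block at lines 16416 to 16444 builds vectors that are NaN everywhere (0/0 via Fdiv) apart from two inserted finite values, rotates them with Ext, then reduces. Fmaxnmv/Fminnmv use maxNum/minNum semantics, so NaN lanes are skipped; std::fmax models this behavior:

    #include <cmath>
    #include <limits>

    // NaN-ignoring max reduction: fmax(NaN, x) == x, so the result is
    // NaN only if every active lane is NaN.
    double fmaxnmv(const double* lanes, const bool* pg, int n) {
      double r = std::numeric_limits<double>::quiet_NaN();
      for (int i = 0; i < n; i++) {
        if (pg[i]) r = std::fmax(r, lanes[i]);
      }
      return r;
    }
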
16685 ZRegister da_result = z10.WithLaneSize(lane_size_in_bits);
16977 (masm.*macro_idx)(z10.VnS(), z2.VnS(), z1.VnS(), z0.VnS(), 3);
17046 ASSERT_EQUAL_SVE(z22.VnS(), z10.VnS());
18442 __ Adr(z10.VnD(), SVEMemOperand(z0.VnD(), z2.VnD(), UXTW, 3));
18480 ASSERT_EQUAL_SVE(expected_z10, z10.VnD());
18559 ZRegister zn_agg = z10.WithLaneSize(esize_in_bits);
18733 __ Dup(z10.VnB(), 0);
18734 __ Saba(z10.VnB(), z10.VnB(), z9.VnB(), z10.VnB());
18759 ASSERT_EQUAL_SVE(z10, z11);
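
The Saba at line 18734 reuses z10 (zeroed just before) as both the accumulator and the second source, so the test effectively computes the absolute value of z9's lanes. Per-lane semantics, modulo int8 wrap-around:

    #include <cstdint>

    // Signed absolute difference, accumulated: acc += |a - b|.
    int8_t saba(int8_t acc, int8_t a, int8_t b) {
      int diff = int(a) - int(b);
      return int8_t(acc + (diff < 0 ? -diff : diff));
    }
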
18967 __ Ldnt1sh(z10.VnD(), p2.Zeroing(), SVEMemOperand(z30.VnD(), x1));
18983 ASSERT_EQUAL_SVE(z10, z11);
19214 __ Mov(z10, z31);
19215 __ Sqrdcmlah(z10.VnS(), z10.VnS(), z0.VnS(), z1.VnS(), 0);
19242 ASSERT_EQUAL_SVE(zd_000_expected, z10.VnS());
19247 ASSERT_EQUAL_SVE(z14, z10);
19702 __ Mov(z10.VnS(), 0x8101ff40); // [-127, 1, -1, 64] as signed bytes.
19704 __ Usdot(z12.VnS(), z4.VnS(), z11.VnB(), z10.VnB());
19705 __ Usdot(z13.VnS(), z4.VnS(), z10.VnB(), z11.VnB());
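
The final pair at lines 19704 and 19705 issues Usdot with both operand orders. The instruction is asymmetric, first source unsigned and second source signed, so the two results differ; a reference model for one 32-bit lane:

    #include <cstdint>

    // zd.s[i] += sum of four unsigned-times-signed byte products.
    int32_t usdot_lane(int32_t acc, const uint8_t u[4], const int8_t s[4]) {
      for (int j = 0; j < 4; j++) {
        acc += int32_t(u[j]) * int32_t(s[j]);
      }
      return acc;
    }
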