Lines matching refs: z11

311 __ Index(z11.VnB(), 0, -1);
362 ASSERT_EQUAL_SVE_LANE(0x00, z11.VnB(), i);
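A minimal scalar model of the Index form used above (and of the register form at line 2258), assuming B lanes; the helper name is illustrative:

#include <cstdint>
#include <vector>

// zd[i] = start + i * step, truncated to the lane width.
std::vector<uint8_t> IndexB(int64_t start, int64_t step, int lane_count) {
  std::vector<uint8_t> zd(lane_count);
  for (int i = 0; i < lane_count; i++) {
    zd[i] = static_cast<uint8_t>(start + i * step);  // wraps modulo 2^8
  }
  return zd;
}

With start 0 and step -1 the lanes come out as 0x00, 0xff, 0xfe, ..., and the register form behind the (42 - (3 * i)) & b_mask check at line 2316 follows the same rule.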
403 ZRegister mla_dn_result = z11.WithLaneSize(lane_size_in_bits);
1032 __ Splice(z11.VnD(), p2, z29.VnD(), z30.VnD());
1064 ASSERT_EQUAL_SVE(z11_expected, z11.VnD());
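For readers unfamiliar with Splice, a minimal scalar sketch of the destructive form used above, assuming D lanes and illustrative names: the active elements of the first source, from the first to the last active lane of the predicate, move to the low end of the result, and the rest is filled from the lowest lanes of the second source.

#include <cstdint>
#include <vector>

std::vector<uint64_t> SpliceD(const std::vector<bool>& pg,
                              const std::vector<uint64_t>& zn,
                              const std::vector<uint64_t>& zm) {
  std::vector<uint64_t> r;
  int first = -1;
  int last = -1;
  for (int i = 0; i < static_cast<int>(pg.size()); i++) {
    if (pg[i]) {
      if (first < 0) first = i;
      last = i;
    }
  }
  // Copy the active slice of zn, then pad from the low lanes of zm.
  for (int i = first; first >= 0 && i <= last; i++) r.push_back(zn[i]);
  for (int i = 0; r.size() < zn.size(); i++) r.push_back(zm[i]);
  return r;
}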
1139 InsrHelper(&masm, z11.VnB(), z11_inputs);
1142 __ Cmphs(p6.VnB(), p0.Zeroing(), z10.VnB(), z11.VnB());
1183 __ Cmpls(p12.VnB(), p0.Zeroing(), z11.VnB(), z10.VnB()); // HS
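The "HS" note marks that Cmpls has no encoding of its own: VIXL emits cmphs with the sources swapped. A scalar model of the two unsigned compares, assuming B lanes:

#include <cstdint>

// Unsigned higher-or-same, and its operand-swapped alias lower-or-same.
bool CmphsLane(bool pg, uint8_t zn, uint8_t zm) { return pg && (zn >= zm); }
bool CmplsLane(bool pg, uint8_t zn, uint8_t zm) { return pg && (zn <= zm); }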
1374 InsrHelper(&masm, z11.VnH(), z23_inputs);
1379 __ Orr(z11.VnH(), z11.VnH(), 0x0ff0);
1431 ASSERT_EQUAL_SVE(z11_expected, z11.VnH());
1466 __ Dup(z11.VnS(), -516097); // 0xfff81fff, as a signed int.
1494 ASSERT_EQUAL_SVE(0xfff81fff, z11.VnS());
1807 __ Decp(z11.VnD(), p0, z1.VnD());
1870 ASSERT_EQUAL_SVE(z1_expected, z11.VnD());
1955 __ Sqdecp(z11.VnD(), p0, z1.VnD());
2018 ASSERT_EQUAL_SVE(z1_expected, z11.VnD());
2103 __ Uqdecp(z11.VnD(), p0, z1.VnD());
2174 ASSERT_EQUAL_SVE(z1_expected, z11.VnD());
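All three decrements above subtract the number of active lanes in p0 from every element; they differ only in overflow behaviour. A minimal scalar model, assuming D lanes and a non-negative count:

#include <cstdint>
#include <limits>

uint64_t DecpLane(uint64_t x, int count) { return x - count; }  // wraps

int64_t SqdecpLane(int64_t x, int count) {  // saturates as signed
  if (x < std::numeric_limits<int64_t>::min() + count) {
    return std::numeric_limits<int64_t>::min();
  }
  return x - count;
}

uint64_t UqdecpLane(uint64_t x, int count) {  // saturates as unsigned
  return (x < static_cast<uint64_t>(count)) ? 0 : x - count;
}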
2258 __ Index(z11.VnB(), w0, w1);
2316 ASSERT_EQUAL_SVE_LANE((42 - (3 * i)) & b_mask, z11.VnB(), i);
4414 ASSERT_EQUAL_SVE(z11, z27);
5625 __ Dup(z11.VnH(), z9.VnH(), index[1]);
5694 ASSERT_EQUAL_SVE_LANE(expected_z11, z11.VnH(), i);
5747 __ Sunpkhi(z11.VnS(), z9.VnH());
5784 uint32_t expected = core.zreg_lane<uint32_t>(z11.GetCode(), i);
6550 __ Cpy(z11.VnD(), pg.Zeroing(), 0x0123456789abcdef);
6616 ASSERT_EQUAL_SVE(expected_z11, z11.VnD());
6654 __ Fcpy(z11.VnH(), pg.Merging(), Float16(42.0));
6733 ASSERT_EQUAL_SVE(expected_z11, z11.VnD());
6748 ASSERT_EQUAL_SVE(z11.VnD(), z15.VnD());
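Cpy and Fcpy both broadcast an immediate, but the two calls above use different predication modes; a scalar sketch of the difference, with illustrative names:

#include <cstdint>

// Zeroing: inactive lanes become zero.
uint64_t CpyZeroLane(bool pg, uint64_t imm) { return pg ? imm : 0; }

// Merging: inactive lanes keep their previous contents.
uint64_t FcpyMergeLane(bool pg, uint64_t imm, uint64_t old_value) {
  return pg ? imm : old_value;
}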
6880 __ Ldr(z11, SVEMemOperand(x0, xzr)); // Test xzr operand.
6914 ASSERT_EQUAL_SVE(z1, z11);
7089 __ Ld1b(z11.VnD(), p4.Zeroing(), SVEMemOperand(x1, x2));
7116 // Ld1b(z11.VnD(), ...)
7220 ASSERT_EQUAL_SVE(z21, z11);
7284 // Registers z4-z11 will hold as-stored values (with inactive elements
7307 __ Dup(z11.VnD(), 0);
7309 __ Mov(z11.VnD(), p3.Merging(), z19.VnD());
7399 ASSERT_EQUAL_SVE(z11, z27);
7422 __ Index(z11.VnB(), -5, 11);
7425 __ St2b(z10.VnB(), z11.VnB(), p7, SVEMemOperand(x0, x1));
7460 // Registers z4-z11 will hold as-stored values (with inactive elements
7467 __ Mov(z5.VnB(), p7.Merging(), z11.VnB());
7483 __ Dup(z11.VnD(), 0);
7485 __ Mov(z11.VnD(), p4.Merging(), z0.VnD());
7512 // st2b { z10.b, z11.b }, SVE_MUL4
7571 ASSERT_EQUAL_SVE(z11, z27);
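The St2b stores in this test (and the St3b/St4b variants below) interleave their source registers in memory. A minimal sketch of the two-register case, assuming B lanes and illustrative names; St3b and St4b generalize the stride to three and four:

#include <cstdint>
#include <vector>

// Active lane i of each source lands at consecutive bytes:
// z10[0], z11[0], z10[1], z11[1], ...
void St2bModel(uint8_t* mem, const std::vector<bool>& pg,
               const uint8_t* z10, const uint8_t* z11, int lane_count) {
  for (int i = 0; i < lane_count; i++) {
    if (pg[i]) {
      mem[2 * i] = z10[i];
      mem[2 * i + 1] = z11[i];
    }
  }
}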
7605 __ Index(z11.VnB(), 2, -3);
7608 __ St3b(z10.VnB(), z11.VnB(), z12.VnB(), p0, SVEMemOperand(x0));
7614 __ Mov(z5.VnB(), p0.Merging(), z11.VnB());
7642 __ Dup(z11.VnS(), 0);
7645 __ Mov(z11.VnS(), p2.Merging(), z31.VnS());
7709 // st3b { z10.b, z11.b, z12.b }, SVE_ALL
7772 ASSERT_EQUAL_SVE(z11, z23);
7811 __ Index(z11.VnB(), -5, 11);
7815 __ St3b(z10.VnB(), z11.VnB(), z12.VnB(), p7, SVEMemOperand(x0, x1, LSL, 0));
7821 __ Mov(z5.VnB(), p7.Merging(), z11.VnB());
7854 __ Dup(z11.VnS(), 0);
7857 __ Mov(z11.VnS(), p5.Merging(), z31.VnS());
7919 // st3b { z10.b, z11.b, z12.b }, SVE_MUL4
7985 ASSERT_EQUAL_SVE(z11, z23);
8026 __ Index(z11.VnB(), 2, -7);
8030 __ St4b(z10.VnB(), z11.VnB(), z12.VnB(), z13.VnB(), p0, SVEMemOperand(x0));
8037 __ Mov(z4.VnB(), p0.Merging(), z11.VnB());
8076 __ Dup(z11.VnS(), 0);
8080 __ Mov(z11.VnS(), p2.Merging(), z29.VnS());
8161 // st4b { z10.b, z11.b, z12.b, z13.b }, SVE_ALL
8233 ASSERT_EQUAL_SVE(z11, z27);
8340 __ Dup(z11.VnS(), 0);
8344 __ Mov(z11.VnS(), p5.Merging(), z29.VnS());
8494 ASSERT_EQUAL_SVE(z11, z27);
8538 __ Index(z11.VnS(), 45, 4);
8545 z11.VnS(),
8577 __ Mov(z23.VnS(), p2.Merging(), z11.VnS());
8581 z11.VnS(),
8610 ASSERT_EQUAL_SVE(z11, z23);
8641 __ Index(z11.VnS(), 45, 4);
8643 __ St4w(z8.VnS(), z9.VnS(), z10.VnS(), z11.VnS(), p2, SVEMemOperand(sp));
8673 __ Mov(z23.VnS(), p2.Merging(), z11.VnS());
8677 z11.VnS(),
9502 __ Ldff1w(z11.VnD(), all.Zeroing(), SVEMemOperand(x2, z29.VnD(), UXTW));
9591 ASSERT_EQUAL_SVE(expected_z11, z11.VnD());
9696 __ Ld1w(z11.VnD(), all.Zeroing(), SVEMemOperand(x2, z29.VnD(), UXTW));
9771 ASSERT_EQUAL_SVE(expected_z11, z11.VnD());
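Both loads above are gathers with zero-extended 32-bit offsets; Ldff1w is the first-faulting variant, which takes a fault only for the first active element and records how far it got in FFR. A scalar model of one lane, assuming the unscaled offsets written above:

#include <cstdint>
#include <cstring>

// Each D lane loads a word from base + UXTW(offset) and zero-extends it.
uint64_t Ld1wGatherLane(const uint8_t* base, uint64_t offset_lane) {
  uint32_t w;
  std::memcpy(&w, base + (offset_lane & 0xffffffff), sizeof(w));
  return w;
}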
10373 __ Ldnt1h(z11.VnH(), p2.Zeroing(), SVEMemOperand(x0, -1, SVE_MUL_VL));
10389 ASSERT_EQUAL_SVE(z10, z11);
10606 __ Index(z11.VnD(), 1, -1);
10634 ASSERT_EQUAL_SVE(z11, z10);
10658 ZRegister zn_ld_h = z11.WithLaneSize(esize_in_bits);
11278 __ Fdup(z11.VnH(), kFP16PositiveInfinity);
11300 ASSERT_EQUAL_SVE(Float16ToRawbits(kFP16PositiveInfinity), z11.VnH());
11390 ASSERT_EQUAL_SVE_LANE(0, z11.VnD(), i);
13014 __ Dup(z11.VnD(), 0x100000001);
13015 __ Lsl(z14.VnD(), p0.Merging(), z1.VnD(), z11.VnD());
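In the vector-shift form above the per-lane shift amounts come from z11, and an amount of 64 or more on a D lane yields zero, so the 0x100000001 pattern is out of range for every lane. A one-line scalar model:

#include <cstdint>

uint64_t LslLaneD(uint64_t x, uint64_t shift) {
  return (shift >= 64) ? 0 : (x << shift);
}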
13137 __ Asr(z11.VnD(), p0.Merging(), z10.VnD(), 5);
13165 ASSERT_EQUAL_SVE(expected_z11, z11.VnD());
13199 __ Asrd(z11.VnS(), p0.Merging(), z31.VnS(), 32);
13234 ASSERT_EQUAL_SVE(expected_z11, z11.VnD());
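Asrd, unlike the plain Asr above it, rounds towards zero, making it a signed division by a power of two. A scalar model for S lanes with shift in [1, 32], using an illustrative name:

#include <cstdint>

int32_t AsrdLane(int32_t x, int shift) {
  int64_t v = x;
  if (v < 0) v += (int64_t{1} << shift) - 1;  // bias negatives up
  return static_cast<int32_t>(v >> shift);    // x / 2^shift, truncated
}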
13728 __ Fexpa(z11.VnH(), z5.VnH());
13745 ASSERT_EQUAL_SVE(expected_z11, z11.VnD());
14460 __ Uzp2(z11.VnB(), z2.VnB(), z3.VnB());
14491 ASSERT_EQUAL_SVE(z1.VnD(), z11.VnD());
14558 __ Index(z11.VnS(), 1, 1);
14559 __ Scvtf(z11.VnS(), p0.Merging(), z11.VnS());
14562 __ Sel(z11.VnS(), p3, z11.VnS(), z0.VnS());
14563 __ Mov(z12, z11);
14572 __ Fcadd(z11.VnS(), p0.Merging(), z11.VnS(), z2.VnS(), 90);
14573 __ Fcadd(z11.VnS(), p0.Merging(), z11.VnS(), z12.VnS(), 270);
14604 ASSERT_EQUAL_SVE(z29.VnS(), z11.VnS());
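Fcadd treats adjacent lane pairs (even, odd) as complex numbers (real, imaginary): rotation 90 adds i times the second operand and rotation 270 adds -i times it. A scalar model of one pair, assuming S lanes:

void FcaddPair(float* re, float* im, float zm_re, float zm_im, int rot) {
  if (rot == 90) {   // zdn += i * zm
    *re -= zm_im;
    *im += zm_re;
  } else {           // rot == 270: zdn -= i * zm
    *re += zm_im;
    *im -= zm_re;
  }
}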
14835 __ Fmul(z11.VnD(), z1.VnD(), z0.VnD(), 1);
14884 ASSERT_EQUAL_SVE(z21.VnD(), z11.VnD());
14928 __ Mov(z11, z2);
14929 __ Ftmad(z11.VnS(), z11.VnS(), z3.VnS(), 4);
14965 ASSERT_EQUAL_SVE(expected_z11, z11.VnD());
15175 __ Mov(z11, z0);
15176 __ Fdiv(z11.VnS(), p1.Merging(), z11.VnS(), z11.VnS());
15185 __ Mov(z16, z11);
15187 __ Mov(z17, z11);
15318 __ Dup(z11.VnD(), 0x0010000000000000); // 2^-1022
15319 __ Fscale(z11.VnD(), p0.Merging(), z11.VnD(), z10.VnD());
15342 ASSERT_EQUAL_SVE(expected_z11, z11.VnD());
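Fscale multiplies each element by two raised to the corresponding integer element of the second source, much like std::ldexp; 0x0010000000000000 is the smallest positive normal double, 2^-1022, so the test probes the bottom of the normal range. A one-lane sketch:

#include <cmath>
#include <cstdint>

double FscaleLane(double x, int64_t n) {
  return std::ldexp(x, static_cast<int>(n));
}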
16686 ZRegister dn_result = z11.WithLaneSize(lane_size_in_bits);
16981 __ Mov(z11, z0);
16982 (masm.*macro_idx)(z11.VnD(), z2.VnD(), z1.VnD(), z11.VnD(), 0); // zd == zm
17048 ASSERT_EQUAL_SVE(z23.VnD(), z11.VnD());
17050 ASSERT_EQUAL_SVE(z11.VnD(), z13.VnD());
17792 ZRegister zt_fp_1 = z11.WithLaneSize(lane_size_in_bits);
18054 __ Index(z11.WithLaneSize(ls), 42, 1);
18056 __ Mov(z21, z11);
18057 __ Mov(z9, z11);
18058 (masm.*macro_m)(z11.WithLaneSize(ds), p1.Merging(), z28.WithLaneSize(ss));
18443 __ Adr(z11.VnD(), SVEMemOperand(z0.VnD(), z2.VnD(), SXTW));
18481 ASSERT_EQUAL_SVE(expected_z11, z11.VnD());
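The vector Adr computes per-lane addresses rather than loading anything: with SXTW and no shift, each D lane of the result is the base lane plus the sign-extended low 32 bits of the index lane. A scalar model, with an illustrative name:

#include <cstdint>

uint64_t AdrLaneSxtw(uint64_t base, uint64_t index) {
  return base + static_cast<uint64_t>(
      static_cast<int64_t>(static_cast<int32_t>(index)));
}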
18560 ZRegister zn_agg_ref = z11.WithLaneSize(esize_in_bits);
18643 __ Prfd(PLDL3KEEP, p5, SVEMemOperand(z11.VnD(), 9));
18735 __ Index(z11.VnH(), 0x0101, 1);
18759 ASSERT_EQUAL_SVE(z10, z11);
18968 __ Ld1sh(z11.VnD(), p2.Zeroing(), SVEMemOperand(x1, z30.VnD()));
18983 ASSERT_EQUAL_SVE(z10, z11);
19217 __ Mov(z11, z31);
19218 __ Sqrdcmlah(z11.VnS(), z11.VnS(), z0.VnS(), z1.VnS(), 90);
19243 ASSERT_EQUAL_SVE(zd_090_expected, z11.VnS());
19248 ASSERT_EQUAL_SVE(z15, z11);
19703 __ Mov(z11.VnS(), 0x02fe8002); // [2, 254, 128, 2] as unsigned bytes.
19704 __ Usdot(z12.VnS(), z4.VnS(), z11.VnB(), z10.VnB());
19705 __ Usdot(z13.VnS(), z4.VnS(), z10.VnB(), z11.VnB());
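Usdot is a mixed-sign dot product: each S lane accumulates products of four unsigned bytes from the first byte operand with four signed bytes from the second, so it is not commutative; the two calls above, with operands swapped, test exactly that asymmetry. A scalar model of one lane:

#include <cstdint>

int32_t UsdotLane(int32_t acc, const uint8_t* zn_quad, const int8_t* zm_quad) {
  for (int j = 0; j < 4; j++) acc += int32_t{zn_quad[j]} * zm_quad[j];
  return acc;
}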
20118 __ Dup(z11.VnQ(), z0.VnQ(), 2);