/third_party/vixl/test/aarch64/
test-utils-aarch64.h
      62  size_t lane_size = sizeof(T);  in GetLane()
      64  VIXL_CHECK(kSizeInBytes >= ((lane + 1) * lane_size));  in GetLane()
      66  memcpy(&result, bytes + (lane * lane_size), lane_size);  in GetLane()
      72  size_t lane_size = sizeof(value);  in SetLane()
      73  VIXL_CHECK(kSizeInBytes >= ((lane + 1) * lane_size));  in SetLane()
      74  memcpy(bytes + (lane * lane_size), &value, lane_size);  in SetLane()
     466  int lane_size = result.GetLaneSizeInBits();
     467  for (int lane = 0; lane < core->GetSVELaneCount(lane_size); …
     …
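Note: the GetLane()/SetLane() hits above share one pattern: compute the lane's byte offset as lane * sizeof(element), check that the lane still fits inside the register, then memcpy the element in or out. A minimal stand-alone sketch of that pattern follows; FakeRegister and its kSizeInBytes are illustrative stand-ins, not the VIXL types.

    #include <cassert>
    #include <cstddef>
    #include <cstdint>
    #include <cstring>

    // Illustrative 128-bit (Q-register sized) byte buffer, not the VIXL class.
    constexpr size_t kSizeInBytes = 16;

    struct FakeRegister {
      uint8_t bytes[kSizeInBytes];

      template <typename T>
      T GetLane(int lane) const {
        size_t lane_size = sizeof(T);
        assert(kSizeInBytes >= ((lane + 1) * lane_size));  // lane is in range
        T result;
        std::memcpy(&result, bytes + (lane * lane_size), lane_size);
        return result;
      }

      template <typename T>
      void SetLane(int lane, T value) {
        size_t lane_size = sizeof(value);
        assert(kSizeInBytes >= ((lane + 1) * lane_size));
        std::memcpy(bytes + (lane * lane_size), &value, lane_size);
      }
    };

For example, reg.SetLane<uint32_t>(2, 0xdeadbeef) writes the third 32-bit lane, and reg.GetLane<uint16_t>(0) reads the lowest 16-bit lane.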
test-utils-aarch64.cc
     362  unsigned lane_size = reg.GetLaneSizeInBits();  in EqualSVELane()
     367  VIXL_ASSERT(IsUintN(lane_size, expected) ||  in EqualSVELane()
     368      IsIntN(lane_size, RawbitsToInt64(expected)));  in EqualSVELane()
     369  expected &= GetUintMask(lane_size);  in EqualSVELane()
     371  uint64_t result = core->zreg_lane(reg.GetCode(), lane_size, lane);  in EqualSVELane()
     373  unsigned lane_size_in_hex_chars = lane_size / 4;  in EqualSVELane()
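Note: EqualSVELane() masks the expected value down to the lane width before comparing, and sizes its hex output at one character per 4 bits of the lane (lane_size / 4). Below is a stand-alone sketch of that check under those assumptions; CheckLane and its printing are illustrative, not the VIXL helpers.

    #include <cstdint>
    #include <cstdio>

    // Compare one lane and report mismatches with lane_size / 4 hex digits.
    bool CheckLane(uint64_t expected, uint64_t actual, unsigned lane_size,
                   int lane) {
      // Ignore bits above the lane width, as the masking at line 369 does.
      uint64_t mask =
          (lane_size == 64) ? ~UINT64_C(0) : ((UINT64_C(1) << lane_size) - 1);
      expected &= mask;
      if (actual == expected) return true;
      int hex_chars = static_cast<int>(lane_size / 4);  // one digit per 4 bits
      std::printf("lane %d: expected 0x%0*llx, got 0x%0*llx\n", lane, hex_chars,
                  static_cast<unsigned long long>(expected), hex_chars,
                  static_cast<unsigned long long>(actual));
      return false;
    }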
test-assembler-sve-aarch64.cc
   12700  __ Lsl(zd_lsl, zn, shift - 1);  // Lsl supports 0 - lane_size-1.
   17888  int lane_size = lane_sizes[i];
   17890  TestFpCompareHelper(config, lane_size, gt, zn, zm, pd_fcm_gt);
   17891  TestFpCompareHelper(config, lane_size, lt, zn, zm, pd_fcm_lt);
   17892  TestFpCompareHelper(config, lane_size, ge, zn, zm, pd_fcm_ge);
   17893  TestFpCompareHelper(config, lane_size, le, zn, zm, pd_fcm_le);
   17894  TestFpCompareHelper(config, lane_size, eq, zn, zm, pd_fcm_eq);
   17895  TestFpCompareHelper(config, lane_size, ne, zn, zm, pd_fcm_ne);
   17896  TestFpCompareHelper(config, lane_size, uo, zn, zm, pd_fcm_uo);
   17899  TestFpCompareHelper(config, lane_size, g…
     …
/third_party/vixl/src/aarch64/ |
simulator-aarch64.cc
    1153  GetPrintRegisterFormatForSize(unsigned reg_size, unsigned lane_size)  in Simulator()
    1154  unsigned reg_size, unsigned lane_size) {  in Simulator()
    1155  VIXL_ASSERT(reg_size >= lane_size);  in Simulator()
    1158  if (reg_size != lane_size) {  in Simulator()
    1172  switch (lane_size) {  in Simulator()
    1355  int lane_size = GetPrintRegLaneSizeInBytes(format);  in Simulator()
    1360  bool access = (lane_mask & (1 << (i * lane_size))) != 0;  in Simulator()
    1366  switch (lane_size) {  in Simulator()
    1370  memcpy(&element_fp16, &value[i * lane_size], sizeof(element_fp16));  in Simulator()
    1376  memcpy(&element_fp32, &value[i * lane_size], sizeof(element_fp32));  in Simulator()
    1381  memcpy(&element, &value[i * lane_size], sizeo…  in Simulator()
    2617  int lane_size = shift_and_lane_size.second;  in Simulator()
    2645  int lane_size = shift_and_lane_size.second;  in Simulator()
    2924  int lane_size = shift_and_lane_size.second;  in Simulator()
    3221  int lane_size = shift_and_lane_size.second;  in Simulator()
    3517  unsigned lane_size = shift_and_lane_size.second;  in Simulator()
    3553  unsigned lane_size = shift_and_lane_size.second;  in Simulator()
    8612  int lane_size = LaneSizeInBytesFromFormat(vf);  in Simulator()
    9674  int lane_size = instr->GetSVEBitwiseImmLaneSizeInBytesLog2();  in Simulator()
    9696  int lane_size = instr->GetSVEBitwiseImmLaneSizeInBytesLog2();  in Simulator()
    9767  unsigned lane_size = shift_and_lane_size.second;  in Simulator()
    9927  unsigned lane_size = shift_and_lane_size.second;  in Simulator()
   10519  int lane_size = LaneSizeInBitsFromFormat(vform);  in Simulator()
     …
logic-aarch64.cc
     580  int lane_size = LaneSizeInBitsFromFormat(vform);  in add()
     601  dst.SetInt(vform, i, ur >> (64 - lane_size));  in add()
     610  int lane_size = LaneSizeInBitsFromFormat(vform);  in add_uint()
     611  VIXL_ASSERT(IsUintN(lane_size, value));  in add_uint()
     614  uint64_t ub = value << (64 - lane_size);  in add_uint()
     630  dst.SetInt(vform, i, ur >> (64 - lane_size));  in add_uint()
     972  int lane_size = LaneSizeInBitsFromFormat(vform);  in sub()
     993  dst.SetInt(vform, i, ur >> (64 - lane_size));  in sub()
    1002  int lane_size = LaneSizeInBitsFromFormat(vform);  in sub_uint()
    1003  VIXL_ASSERT(IsUintN(lane_size, valu…  in sub_uint()
     …
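Note: add(), add_uint(), sub() and sub_uint() all rely on the same trick visible above: each operand is shifted into the top lane_size bits of a uint64_t so that one ordinary 64-bit addition yields the lane's carry-out and signed overflow, and ur >> (64 - lane_size) moves the result back down. A self-contained sketch of that trick follows; LaneAdd and its result struct are illustrative, not the simulator's LogicVRegister API.

    #include <cstdint>
    #include <cstdio>

    struct LaneAddResult {
      uint64_t value;  // lane result, right-aligned in the low lane_size bits
      bool carry;      // unsigned carry out of the lane
      bool overflow;   // signed overflow within the lane
    };

    // Valid for lane_size in 1..64.
    LaneAddResult LaneAdd(uint64_t a, uint64_t b, int lane_size) {
      uint64_t ua = a << (64 - lane_size);  // operand in the top lane_size bits
      uint64_t ub = b << (64 - lane_size);
      uint64_t ur = ua + ub;                // wraps modulo 2^64

      LaneAddResult r;
      r.carry = ur < ua;  // 64-bit wrap-around == lane carry-out
      // Signed overflow: operands share a sign bit but the result's differs.
      r.overflow = ((~(ua ^ ub) & (ua ^ ur)) >> 63) != 0;
      r.value = ur >> (64 - lane_size);     // move the lane back down
      return r;
    }

    int main() {
      // 0x7F + 0x01 overflows a signed 8-bit lane but produces no carry.
      LaneAddResult r = LaneAdd(0x7F, 0x01, 8);
      std::printf("value=0x%llx carry=%d overflow=%d\n",
                  static_cast<unsigned long long>(r.value), r.carry, r.overflow);
      return 0;
    }

Compiled and run, this prints value=0x80 carry=0 overflow=1: adding 1 to INT8_MAX wraps the signed 8-bit lane without producing an unsigned carry.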
assembler-aarch64.cc
    2866  unsigned lane_size = vt.GetLaneSizeInBytes();  in LoadStoreStructSingle()
    2867  VIXL_ASSERT(lane_size > 0);  in LoadStoreStructSingle()
    2868  VIXL_ASSERT(lane < (kQRegSizeInBytes / lane_size));  in LoadStoreStructSingle()
    2872  lane *= lane_size;  in LoadStoreStructSingle()
    2873  if (lane_size == 8) lane++;  in LoadStoreStructSingle()
    2880  switch (lane_size) {  in LoadStoreStructSingle()
    2891  VIXL_ASSERT(lane_size == 8);  in LoadStoreStructSingle()
    5018  int lane_size = vn.GetLaneSizeInBytes();
    5020  switch (lane_size) {
    5031  VIXL_ASSERT(lane_size…
     …
instructions-aarch64.cc
    1405  int lane_size = LaneSizeInBitsFromFormat(vform);  in MaxIntFromFormat()
    1406  return static_cast<int64_t>(GetUintMask(lane_size) >> 1);  in MaxIntFromFormat()
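Note: MaxIntFromFormat() derives the largest signed lane value from the unsigned all-ones mask: shifting an n-bit mask right by one clears the sign bit, leaving 2^(n-1) - 1. A quick check of that identity follows; UintMask is a hypothetical stand-in for GetUintMask().

    #include <cassert>
    #include <cstdint>

    // All-ones mask of `bits` width; hypothetical stand-in for GetUintMask().
    constexpr uint64_t UintMask(unsigned bits) {
      return (bits == 64) ? ~UINT64_C(0) : ((UINT64_C(1) << bits) - 1);
    }

    int main() {
      assert((UintMask(8) >> 1) == 0x7F);     // INT8_MAX
      assert((UintMask(16) >> 1) == 0x7FFF);  // INT16_MAX
      assert(static_cast<int64_t>(UintMask(64) >> 1) == INT64_MAX);
      return 0;
    }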
registers-aarch64.h
     536  EncodedSize lane_size,
     542  lane_size_(lane_size) {}
disasm-aarch64.cc
    3982  int lane_size = instr->GetSVEBitwiseImmLaneSizeInBytesLog2();  in Disassembler()
    3983  mnemonic = SVEMoveMaskPreferred(imm, lane_size) ? "mov" : "dupm";  in Disassembler()
    5100  unsigned lane_size = instr->GetSVESize();  in Disassembler()
    5132  if (lane_size <= kSRegSizeInBytesLog2) {  in Disassembler()
    5736  int lane_size = shift_and_lane_size.second;  in Disassembler()
    5739  shift_dist = (8 << lane_size) - shift_dist;  in Disassembler()
    5740  if ((lane_size >= static_cast<int>(kBRegSizeInBytesLog2)) &&  in Disassembler()
    5741      (lane_size <= static_cast<int>(kSRegSizeInBytesLog2)) &&  in Disassembler()
    5774  int lane_size = shift_and_lane_size.second;  in Disassembler()
    5775  if ((lane_size >…  in Disassembler()
    5787  int lane_size = shift_and_lane_size.second;  in Disassembler()
     …
macro-assembler-sve-aarch64.cc
     481  unsigned lane_size = zd.GetLaneSizeInBits();  in Dup()
     488  } else if (IsImmLogical(imm.AsUintN(lane_size), lane_size)) {  in Dup()
     490  dupm(zd, imm.AsUintN(lane_size));  in Dup()
assembler-sve-aarch64.cc
      56  int lane_size = zd.GetLaneSizeInBits();  in adr()
      57  VIXL_ASSERT((lane_size == kSRegSize) || (lane_size == kDRegSize));  in adr()
      67  VIXL_ASSERT(lane_size == kDRegSize);  in adr()
      71  VIXL_ASSERT(lane_size == kDRegSize);  in adr()
      76  op = (lane_size == kSRegSize) ? ADR_z_az_s_same_scaled  in adr()
      90  unsigned lane_size = zdn.GetLaneSizeInBits();  in SVELogicalImmediate()
      92  if (IsImmLogical(imm, lane_size, &bit_n, &imm_s, &imm_r)) {  in SVELogicalImmediate()
      93  Emit(op | Rd(zdn) | SVEBitN(bit_n) | SVEImmRotate(imm_r, lane_size) |  in SVELogicalImmediate()
      94      SVEImmSetBits(imm_s, lane_size));  in SVELogicalImmediate()
     …
assembler-aarch64.h
    7288  static Instr SVEImmSetBits(unsigned imms, unsigned lane_size) {
    7290  VIXL_ASSERT((lane_size == kDRegSize) || IsUint6(imms + 3));
    7291  USE(lane_size);
    7295  static Instr SVEImmRotate(unsigned immr, unsigned lane_size) {
    7296  VIXL_ASSERT(IsUintN(WhichPowerOf2(lane_size), immr));
    7297  USE(lane_size);
macro-assembler-aarch64.h
    4528  int lane_size = std::max(zd.GetLaneSizeInBits(), zn.GetLaneSizeInBits());  in Fcvt()
    4530  zd.WithLaneSize(lane_size),  in Fcvt()
    4532  zn.WithLaneSize(lane_size));  in Fcvt()
simulator-aarch64.h
    2351  unsigned lane_size);
/third_party/node/deps/v8/src/codegen/arm64/ |
assembler-arm64.cc
    1881  int lane_size = vd.LaneSizeInBytes();  in ins()
    1883  switch (lane_size) {  in ins()
    1897  DCHECK_EQ(lane_size, 8);  in ins()
    1916  int lane_size = vn.LaneSizeInBytes();  in smov()
    1919  switch (lane_size) {  in smov()
    1927  DCHECK_EQ(lane_size, 4);  in smov()
    2067  int lane_size = vn.LaneSizeInBytes();  in umov()
    2070  switch (lane_size) {  in umov()
    2084  DCHECK_EQ(lane_size, 8);  in umov()
    2113  int lane_size…  in ins()
    2553  unsigned lane_size = vt.LaneSizeInBytes();  in LoadStoreStructSingle()
    3493  int lane_size = vn.LaneSizeInBytes();  in dup()
     …
/third_party/node/deps/v8/src/compiler/backend/arm64/ |
instruction-selector-arm64.cc
    3803  bool ShraHelper(InstructionSelector* selector, Node* node, int lane_size,  in ShraHelper()
    3812  if (g.GetIntegerConstantValue(m.left()->InputAt(1)) % lane_size == 0) {  in ShraHelper()
    3817  selector->Emit(shra_code | LaneSizeField::encode(lane_size),  in ShraHelper()
    3825  bool AdalpHelper(InstructionSelector* selector, Node* node, int lane_size,  in AdalpHelper()
    3830  selector->Emit(adalp_code | LaneSizeField::encode(lane_size),  in AdalpHelper()
    3847  bool SmlalHelper(InstructionSelector* selector, Node* node, int lane_size,  in SmlalHelper()
    3853  selector->Emit(smlal_code | LaneSizeField::encode(lane_size),  in SmlalHelper()
    4238  VisitSignExtendLong(InstructionSelector* selector, ArchOpcode opcode, Node* node, int lane_size)  in VisitSignExtendLong()
    4239  Node* node, int lane_size) {  in VisitSignExtendLong()
    4241  code |= LaneSizeField::encode(lane_size);  in VisitSignExtendLong()
/third_party/node/deps/v8/src/execution/arm64/ |
simulator-arm64.cc
    1196  size_t reg_size, size_t lane_size) {
    1197  DCHECK_GE(reg_size, lane_size);
    1200  if (reg_size != lane_size) {
    1213  switch (lane_size) {
    1573  int lane_size = 1 << lane_size_log2;
    1584  PrintVRegisterFPHelper(code, lane_size, lane_count);
    1661  int lane_size = GetPrintRegLaneSizeInBytes(format);
    1663  PrintVRegisterRawHelper(reg_code, reg_size, lane_size * lane);
    1665  PrintVRegisterFPHelper(reg_code, lane_size, lane_count, lane);
    4959  int lane_size…
     …
simulator-logic-arm64.cc
     647  int lane_size = LaneSizeInBitsFromFormat(vform);  in add()
     668  dst.SetInt(vform, i, ur >> (64 - lane_size));  in add()
     965  int lane_size = LaneSizeInBitsFromFormat(vform);  in sub()
     986  dst.SetInt(vform, i, ur >> (64 - lane_size));  in sub()
simulator-arm64.h
    1293  size_t lane_size);
/third_party/node/deps/v8/src/codegen/s390/ |
macro-assembler-s390.cc
    5875  #define EXT_ADD_PAIRWISE(dst, src, scratch1, scratch2, lane_size, mul_even, \  in CallRecordWriteStub()
    5878  vrepi(scratch2, Operand(1), Condition(lane_size)); \  in CallRecordWriteStub()
    5880  Condition(lane_size)); \  in CallRecordWriteStub()
    5882  Condition(lane_size)); \  in CallRecordWriteStub()
    5884  Condition(lane_size + 1));  in CallRecordWriteStub()
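Note: EXT_ADD_PAIRWISE above appears to build an extended (widening) pairwise add from a splat of 1 (vrepi) plus widening multiply-even/multiply-odd at lane_size, followed by an add at lane_size + 1: multiplying by 1 merely widens each lane, and summing the even and odd products adds adjacent pairs. A scalar sketch of the int8-to-int16 case under those assumed semantics (not the real s390 vector instructions):

    #include <array>
    #include <cstdint>

    // Widen each pair of adjacent int8 lanes and add them, which is what the
    // multiply-even(src, 1) + multiply-odd(src, 1) sequence computes.
    std::array<int16_t, 8> ExtAddPairwiseI8x16(const std::array<int8_t, 16>& src) {
      std::array<int16_t, 8> dst{};
      for (int i = 0; i < 8; ++i) {
        int16_t even = static_cast<int16_t>(src[2 * i]);     // even lane * 1, widened
        int16_t odd = static_cast<int16_t>(src[2 * i + 1]);  // odd lane * 1, widened
        dst[i] = static_cast<int16_t>(even + odd);
      }
      return dst;
    }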
/third_party/node/deps/v8/src/execution/ppc/ |
simulator-ppc.cc
    4469  size_t lane_size = sizeof(input_type); \  in ExecuteGeneric()
    4472  j = lane_size; \  in ExecuteGeneric()
    4474  for (; j < kSimd128Size; i += 2, j += lane_size * 2, k++) { \  in ExecuteGeneric()
/third_party/node/deps/v8/src/execution/s390/ |
simulator-s390.cc
    3350  size_t lane_size = sizeof(input_type); \
    3353  j = lane_size; \
    3355  for (; j < kSimd128Size; i += 2, j += lane_size * 2, k++) { \