Searched refs:SVE_LSL (Results 1 - 7 of 7) sorted by relevance

/third_party/vixl/src/aarch64/
operands-aarch64.h
    538   VIXL_ASSERT(mod_ != SVE_LSL);  // LSL requires an explicit shift amount.   in SVEMemOperand()
    608   ((mod_ == NO_SVE_OFFSET_MODIFIER) || (mod_ == SVE_LSL));   in IsScalarPlusScalar()
    678   return ((mod_ == SVE_LSL) || (mod_ == NO_SVE_OFFSET_MODIFIER));   in IsEquivalentToLSL()
    680   return mod_ == SVE_LSL;   in IsEquivalentToLSL()
    690   if (shift == LSL) return SVE_LSL;   in GetSVEOffsetModifierFor()

operands-aarch64.cc
    396   case SVE_LSL:   in IsValid()

assembler-sve-aarch64.cc
     74   case SVE_LSL:   in adr()
   3999   case SVE_LSL:   in SVEScatterGatherHelper()
   4575   VIXL_ASSERT(addr.GetOffsetModifier() == SVE_LSL);   in SVEContiguousPrefetchScalarPlusScalarHelper()
   4580   VIXL_ASSERT(addr.GetOffsetModifier() == SVE_LSL);   in SVEContiguousPrefetchScalarPlusScalarHelper()
   4585   VIXL_ASSERT(addr.GetOffsetModifier() == SVE_LSL);   in SVEContiguousPrefetchScalarPlusScalarHelper()
   4630   if ((mod == NO_SVE_OFFSET_MODIFIER) || (mod == SVE_LSL)) {   in SVEContiguousPrefetchScalarPlusVectorHelper()
   4639   VIXL_ASSERT(mod == SVE_LSL);   in SVEContiguousPrefetchScalarPlusVectorHelper()
   4643   VIXL_ASSERT(mod == SVE_LSL);   in SVEContiguousPrefetchScalarPlusVectorHelper()
   4647   VIXL_ASSERT(mod == SVE_LSL);   in SVEContiguousPrefetchScalarPlusVectorHelper()
   5093   if (mod == SVE_LSL) {   in SVEMemOperandHelper()
  [all...]

macro-assembler-sve-aarch64.cc
    349   if (mod == SVE_LSL) {   in CalculateSVEAddress()

constants-aarch64.h
    375   SVE_LSL,   (enumerator)

simulator-aarch64.cc
  12155   SVEGatherLoadScalarPlusVectorHelper(instr, kFormatVnD, SVE_LSL);   in Simulator()
  12687   SVE_LSL,   in Simulator()

logic-aarch64.cc
   7682   case SVE_LSL:
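
For context, the matches above all revolve around SVE_LSL, the SVEOffsetModifier value that VIXL records when an SVE memory operand carries an LSL-shifted register offset. Below is a minimal sketch, not taken from the tree, of how such an operand is typically built and consumed through the MacroAssembler; the register choices, shift amount, and CPU-feature setup are illustrative assumptions.

    #include "aarch64/macro-assembler-aarch64.h"

    using namespace vixl::aarch64;

    // Sketch only: assumes `masm` was configured with SVE enabled in its
    // CPUFeatures before any SVE instruction is emitted.
    void GenerateScaledLoad(MacroAssembler* masm) {
      // Scalar-plus-scalar address [x0, x1, LSL #2]. The LSL shift is stored
      // as the SVE_LSL offset modifier with shift amount 2, which is what the
      // GetOffsetModifier()/IsEquivalentToLSL() checks above inspect.
      SVEMemOperand addr(x0, x1, LSL, 2);

      // Contiguous load of 32-bit elements into z0, governed by p0 (zeroing).
      masm->Ld1w(z0.VnS(), p0.Zeroing(), addr);
    }

The assertion at operands-aarch64.h:538 is the other side of the same rule: passing SVE_LSL as a bare modifier, with no shift amount, is rejected because, as the matched comment says, LSL requires an explicit shift amount.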
