1// Copyright 2017, VIXL authors
2// All rights reserved.
3//
4// Redistribution and use in source and binary forms, with or without
5// modification, are permitted provided that the following conditions are met:
6//
7//   * Redistributions of source code must retain the above copyright notice,
8//     this list of conditions and the following disclaimer.
9//   * Redistributions in binary form must reproduce the above copyright notice,
10//     this list of conditions and the following disclaimer in the documentation
11//     and/or other materials provided with the distribution.
12//   * Neither the name of ARM Limited nor the names of its contributors may be
13//     used to endorse or promote products derived from this software without
14//     specific prior written permission.
15//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
17// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
18// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
19// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
20// FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
21// DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
22// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
23// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
24// OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
25// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
26
27#include <cstdio>
28#include <cstring>
29#include <string>
30
31#include "test-runner.h"
32#include "test-utils.h"
33
34#include "aarch64/macro-assembler-aarch64.h"
35#include "aarch64/registers-aarch64.h"
36#include "aarch64/simulator-aarch64.h"
37#include "aarch64/test-utils-aarch64.h"
38
39#define __ masm.
40#define TEST(name) TEST_(AARCH64_API_##name)
41
42
43namespace vixl {
44namespace aarch64 {
45
46// Check compiler intrinsics helpers.
47
48TEST(count_leading_sign_bits) {
49  class Helper {
50   public:
51    static void Check(int64_t value, int non_sign_bits) {
52      VIXL_ASSERT((0 <= non_sign_bits) && (non_sign_bits < 64));
53
54      for (int width = 1; width <= 64; width *= 2) {
55        // Note that leading_sign_bits does not include the topmost bit.
56        int leading_sign_bits = width - non_sign_bits - 1;
57        if (leading_sign_bits < 0) continue;
58
59        int64_t result = CountLeadingSignBits(value, width);
60        int64_t fallback_result = CountLeadingSignBitsFallBack(value, width);
61        VIXL_CHECK(result == leading_sign_bits);
62        VIXL_CHECK(fallback_result == leading_sign_bits);
63      }
64    }
65  };
66
67  // Basic positive (and zero) cases. Sign bits are all zeroes.
68  Helper::Check(0, 0);  // 0b++++
69  Helper::Check(1, 1);  // 0b+++1
70  Helper::Check(2, 2);  // 0b++10
71  Helper::Check(3, 2);  // 0b++11
72  Helper::Check(4, 3);  // 0b+100
73
74  // Basic negative cases. Sign bits are all ones.
75  Helper::Check(-1, 0);  // 0b----
76  Helper::Check(-2, 1);  // 0b---0
77  Helper::Check(-3, 2);  // 0b--01
78  Helper::Check(-4, 2);  // 0b--00
79  Helper::Check(-5, 3);  // 0b-011
80
81  // Boundary conditions.
82  Helper::Check(INT8_MAX, 7);
83  Helper::Check(INT8_MIN, 7);
84  Helper::Check(static_cast<int64_t>(INT8_MAX) + 1, 8);
85  Helper::Check(static_cast<int64_t>(INT8_MIN) - 1, 8);
86
87  Helper::Check(INT16_MAX, 15);
88  Helper::Check(INT16_MIN, 15);
89  Helper::Check(static_cast<int64_t>(INT16_MAX) + 1, 16);
90  Helper::Check(static_cast<int64_t>(INT16_MIN) - 1, 16);
91
92  Helper::Check(INT32_MAX, 31);
93  Helper::Check(INT32_MIN, 31);
94  Helper::Check(static_cast<int64_t>(INT32_MAX) + 1, 32);
95  Helper::Check(static_cast<int64_t>(INT32_MIN) - 1, 32);
96
97  Helper::Check(INT64_MAX, 63);
98  Helper::Check(INT64_MIN, 63);
99
100  // Check automatic width detection.
101  VIXL_CHECK(CountLeadingSignBits(static_cast<int8_t>(42)) == 1);  // 0b00101010
102  VIXL_CHECK(CountLeadingSignBits(static_cast<int16_t>(42)) == 9);
103  VIXL_CHECK(CountLeadingSignBits(static_cast<int32_t>(42)) == 25);
104  VIXL_CHECK(CountLeadingSignBits(static_cast<int64_t>(42)) == 57);
105}
106
107// Check SimFloat16 class mechanics.
108TEST(float16_operators) {
109  ::vixl::internal::SimFloat16 f1 = kFP16DefaultNaN;
110  ::vixl::internal::SimFloat16 f2 = kFP16DefaultNaN;
111  ::vixl::internal::SimFloat16 f3 = kFP16PositiveInfinity;
112  ::vixl::internal::SimFloat16 f4 = kFP16NegativeInfinity;
113  VIXL_CHECK(!(f1 == f2));
114  VIXL_CHECK(f1 != f2);
115  VIXL_CHECK(!(f3 == f4));
116  VIXL_CHECK(f3 != f4);
117  VIXL_CHECK(::vixl::internal::SimFloat16(kFP16PositiveZero) ==
118             ::vixl::internal::SimFloat16(kFP16NegativeZero));
119  VIXL_CHECK(!(::vixl::internal::SimFloat16(kFP16PositiveZero) !=
120               ::vixl::internal::SimFloat16(kFP16NegativeZero)));
121}
122
123TEST(rawbits_conversions) {
124  VIXL_CHECK(RawbitsToInt64(0x0) == 0x0);
125  VIXL_CHECK(RawbitsToInt64(0x123) == 0x123);
126  VIXL_CHECK(RawbitsToInt64(INT64_MAX) == INT64_MAX);
127  VIXL_CHECK(RawbitsToInt64(UINT64_C(0xffffffffffffffff)) == -1);
128  VIXL_CHECK(RawbitsToInt64(UINT64_C(0x8000000000000000)) == INT64_MIN);
129  VIXL_CHECK(RawbitsToInt64(UINT64_C(0x8000000000000001)) == -INT64_MAX);
130
131  VIXL_CHECK(RawbitsToInt32(0x0) == 0x0);
132  VIXL_CHECK(RawbitsToInt32(0x123) == 0x123);
133  VIXL_CHECK(RawbitsToInt32(INT32_MAX) == INT32_MAX);
134  VIXL_CHECK(RawbitsToInt32(UINT32_C(0xffffffff)) == -1);
135  VIXL_CHECK(RawbitsToInt32(UINT32_C(0x80000000)) == INT32_MIN);
136  VIXL_CHECK(RawbitsToInt32(UINT32_C(0x80000001)) == -INT32_MAX);
137}
138
139// Check moved FP constants are still accessible via the AArch64 namespace.
140TEST(float_constants_scope) {
141  VIXL_CHECK(vixl::aarch64::kFP64PositiveInfinity ==
142             vixl::kFP64PositiveInfinity);
143  VIXL_CHECK(vixl::aarch64::kFP64NegativeInfinity ==
144             vixl::kFP64NegativeInfinity);
145  VIXL_CHECK(vixl::aarch64::kFP32PositiveInfinity ==
146             vixl::kFP32PositiveInfinity);
147  VIXL_CHECK(vixl::aarch64::kFP32NegativeInfinity ==
148             vixl::kFP32NegativeInfinity);
149  VIXL_CHECK(Float16ToRawbits(vixl::aarch64::kFP16PositiveInfinity) ==
150             Float16ToRawbits(vixl::aarch64::kFP16PositiveInfinity));
151  VIXL_CHECK(Float16ToRawbits(vixl::aarch64::kFP16NegativeInfinity) ==
152             Float16ToRawbits(vixl::aarch64::kFP16NegativeInfinity));
153  VIXL_CHECK(DoubleToRawbits(vixl::aarch64::kFP64DefaultNaN) ==
154             DoubleToRawbits(vixl::kFP64DefaultNaN));
155  VIXL_CHECK(FloatToRawbits(vixl::aarch64::kFP32DefaultNaN) ==
156             FloatToRawbits(vixl::kFP32DefaultNaN));
157  VIXL_CHECK(IsNaN(vixl::aarch64::kFP16DefaultNaN) ==
158             IsNaN(vixl::kFP16DefaultNaN));
159  VIXL_CHECK(vixl::aarch64::kDoubleExponentBits == vixl::kDoubleExponentBits);
160  VIXL_CHECK(vixl::aarch64::kDoubleMantissaBits == vixl::kDoubleMantissaBits);
161  VIXL_CHECK(vixl::aarch64::kFloatExponentBits == vixl::kFloatExponentBits);
162  VIXL_CHECK(vixl::aarch64::kFloatMantissaBits == vixl::kFloatMantissaBits);
163  VIXL_CHECK(vixl::aarch64::kFloat16ExponentBits == vixl::kFloat16ExponentBits);
164  VIXL_CHECK(vixl::aarch64::kFloat16MantissaBits == vixl::kFloat16MantissaBits);
165}
166
167
168TEST(register_bit) {
169  VIXL_CHECK(x0.GetBit() == (UINT64_C(1) << 0));
170  VIXL_CHECK(x1.GetBit() == (UINT64_C(1) << 1));
171  VIXL_CHECK(x10.GetBit() == (UINT64_C(1) << 10));
172
173  // AAPCS64 definitions.
174  VIXL_CHECK(lr.GetBit() == (UINT64_C(1) << kLinkRegCode));
175
176  // Fixed (hardware) definitions.
177  VIXL_CHECK(xzr.GetBit() == (UINT64_C(1) << kZeroRegCode));
178
179  // Internal ABI definitions.
180  VIXL_CHECK(sp.GetBit() == (UINT64_C(1) << kSPRegInternalCode));
181  VIXL_CHECK(sp.GetBit() != xzr.GetBit());
182
183  // xn.GetBit() == wn.GetBit() at all times, for the same n.
184  VIXL_CHECK(x0.GetBit() == w0.GetBit());
185  VIXL_CHECK(x1.GetBit() == w1.GetBit());
186  VIXL_CHECK(x10.GetBit() == w10.GetBit());
187  VIXL_CHECK(xzr.GetBit() == wzr.GetBit());
188  VIXL_CHECK(sp.GetBit() == wsp.GetBit());
189}
190
191
192TEST(noreg) {
193  VIXL_CHECK(NoReg.Is(NoVReg));
194  VIXL_CHECK(NoVReg.Is(NoReg));
195
196  VIXL_CHECK(NoVReg.Is(NoReg));
197  VIXL_CHECK(NoReg.Is(NoVReg));
198
199  VIXL_CHECK(NoReg.Is(NoCPUReg));
200  VIXL_CHECK(NoCPUReg.Is(NoReg));
201
202  VIXL_CHECK(NoVReg.Is(NoCPUReg));
203  VIXL_CHECK(NoCPUReg.Is(NoVReg));
204
205  VIXL_CHECK(NoVReg.Is(NoCPUReg));
206  VIXL_CHECK(NoCPUReg.Is(NoVReg));
207
208  VIXL_CHECK(NoReg.IsNone());
209  VIXL_CHECK(NoVReg.IsNone());
210  VIXL_CHECK(NoCPUReg.IsNone());
211}
212
213
214TEST(constructors) {
215  // *Register(code)
216  VIXL_CHECK(WRegister(0).Is(w0));
217  VIXL_CHECK(XRegister(1).Is(x1));
218
219  VIXL_CHECK(BRegister(2).Is(b2));
220  VIXL_CHECK(HRegister(3).Is(h3));
221  VIXL_CHECK(SRegister(4).Is(s4));
222  VIXL_CHECK(DRegister(5).Is(d5));
223  VIXL_CHECK(QRegister(6).Is(q6));
224
225  VIXL_CHECK(ZRegister(7).Is(z7));
226  VIXL_CHECK(PRegister(8).Is(p8));
227}
228
229
230TEST(constructors_r) {
231  // Register(code, size_in_bits)
232  VIXL_CHECK(Register(0, kWRegSize).Is(w0));
233  VIXL_CHECK(Register(1, kXRegSize).Is(x1));
234}
235
236
237TEST(constructors_v) {
238  // VRegister(code)
239  VIXL_CHECK(VRegister(0).Is(v0));
240  VIXL_CHECK(VRegister(1).Is(v1));
241  VIXL_CHECK(VRegister(2).Is(v2));
242  VIXL_CHECK(VRegister(3).Is(v3));
243  VIXL_CHECK(VRegister(4).Is(v4));
244
245  // VRegister(code, size_in_bits)
246  VIXL_CHECK(VRegister(0, kBRegSize).Is(b0));
247  VIXL_CHECK(VRegister(1, kHRegSize).Is(h1));
248  VIXL_CHECK(VRegister(2, kSRegSize).Is(s2));
249  VIXL_CHECK(VRegister(3, kDRegSize).Is(d3));
250  VIXL_CHECK(VRegister(4, kQRegSize).Is(q4));
251
252  // VRegister(code, size_in_bits, lanes)
253  VIXL_CHECK(VRegister(0, kBRegSize, 1).Is(b0));
254  VIXL_CHECK(VRegister(1, kHRegSize, 1).Is(h1));
255  VIXL_CHECK(VRegister(2, kSRegSize, 1).Is(s2));
256  VIXL_CHECK(VRegister(3, kDRegSize, 1).Is(d3));
257  VIXL_CHECK(VRegister(4, kQRegSize, 1).Is(q4));
258
259  VIXL_CHECK(VRegister(0, kSRegSize, 2).Is(v0.V2H()));
260
261  VIXL_CHECK(VRegister(1, kDRegSize, 1).Is(v1.V1D()));
262  VIXL_CHECK(VRegister(2, kDRegSize, 2).Is(v2.V2S()));
263  VIXL_CHECK(VRegister(3, kDRegSize, 4).Is(v3.V4H()));
264  VIXL_CHECK(VRegister(4, kDRegSize, 8).Is(v4.V8B()));
265
266  VIXL_CHECK(VRegister(5, kQRegSize, 2).Is(v5.V2D()));
267  VIXL_CHECK(VRegister(6, kQRegSize, 4).Is(v6.V4S()));
268  VIXL_CHECK(VRegister(7, kQRegSize, 8).Is(v7.V8H()));
269  VIXL_CHECK(VRegister(8, kQRegSize, 16).Is(v8.V16B()));
270
271  // VRegister(code, format)
272  VIXL_CHECK(VRegister(0, kFormatB).Is(b0));
273  VIXL_CHECK(VRegister(1, kFormatH).Is(h1));
274  VIXL_CHECK(VRegister(2, kFormatS).Is(s2));
275  VIXL_CHECK(VRegister(3, kFormatD).Is(d3));
276  VIXL_CHECK(VRegister(4, kFormat8B).Is(v4.V8B()));
277  VIXL_CHECK(VRegister(5, kFormat16B).Is(v5.V16B()));
278  VIXL_CHECK(VRegister(6, kFormat2H).Is(v6.V2H()));
279  VIXL_CHECK(VRegister(7, kFormat4H).Is(v7.V4H()));
280  VIXL_CHECK(VRegister(8, kFormat8H).Is(v8.V8H()));
281  VIXL_CHECK(VRegister(9, kFormat2S).Is(v9.V2S()));
282  VIXL_CHECK(VRegister(10, kFormat4S).Is(v10.V4S()));
283  VIXL_CHECK(VRegister(11, kFormat1D).Is(v11.V1D()));
284  VIXL_CHECK(VRegister(12, kFormat2D).Is(v12.V2D()));
285}
286
287
288TEST(constructors_z) {
289  // ZRegister(code, lane_size_in_bits)
290  VIXL_CHECK(ZRegister(0, kBRegSize).Is(z0.VnB()));
291  VIXL_CHECK(ZRegister(1, kHRegSize).Is(z1.VnH()));
292  VIXL_CHECK(ZRegister(2, kSRegSize).Is(z2.VnS()));
293  VIXL_CHECK(ZRegister(3, kDRegSize).Is(z3.VnD()));
294
295  // ZRegister(code, format)
296  VIXL_CHECK(ZRegister(0, kFormatVnB).Is(z0.VnB()));
297  VIXL_CHECK(ZRegister(1, kFormatVnH).Is(z1.VnH()));
298  VIXL_CHECK(ZRegister(2, kFormatVnS).Is(z2.VnS()));
299  VIXL_CHECK(ZRegister(3, kFormatVnD).Is(z3.VnD()));
300}
301
302
TEST(constructors_p) {
  // PRegisterWithLaneSize(code, lane_size_in_bits)
  VIXL_CHECK(PRegisterWithLaneSize(0, kBRegSize).Is(p0.VnB()));
  VIXL_CHECK(PRegisterWithLaneSize(1, kHRegSize).Is(p1.VnH()));
  VIXL_CHECK(PRegisterWithLaneSize(2, kSRegSize).Is(p2.VnS()));
  VIXL_CHECK(PRegisterWithLaneSize(3, kDRegSize).Is(p3.VnD()));

  // PRegisterWithLaneSize(code, format)
  VIXL_CHECK(PRegisterWithLaneSize(0, kFormatVnB).Is(p0.VnB()));
  VIXL_CHECK(PRegisterWithLaneSize(1, kFormatVnH).Is(p1.VnH()));
  VIXL_CHECK(PRegisterWithLaneSize(2, kFormatVnS).Is(p2.VnS()));
  VIXL_CHECK(PRegisterWithLaneSize(3, kFormatVnD).Is(p3.VnD()));

  // Zeroing and merging predicate qualifications.
  VIXL_CHECK(PRegisterZ(0).Is(p0.Zeroing()));
  VIXL_CHECK(PRegisterM(1).Is(p1.Merging()));
}
319
320
321TEST(constructors_cpu) {
322  // ZRegister(code, size_in_bits, type)
323  VIXL_CHECK(CPURegister(0, kWRegSize, CPURegister::kRegister).Is(w0));
324  VIXL_CHECK(CPURegister(1, kXRegSize, CPURegister::kRegister).Is(x1));
325
326  VIXL_CHECK(CPURegister(2, kBRegSize, CPURegister::kVRegister).Is(b2));
327  VIXL_CHECK(CPURegister(3, kHRegSize, CPURegister::kVRegister).Is(h3));
328  VIXL_CHECK(CPURegister(4, kSRegSize, CPURegister::kVRegister).Is(s4));
329  VIXL_CHECK(CPURegister(5, kDRegSize, CPURegister::kVRegister).Is(d5));
330  VIXL_CHECK(CPURegister(6, kQRegSize, CPURegister::kVRegister).Is(q6));
331  VIXL_CHECK(CPURegister(7, kQRegSize, CPURegister::kVRegister).Is(v7));
332
333  VIXL_CHECK(CPURegister(0, CPURegister::kUnknownSize, CPURegister::kVRegister)
334                 .Is(z0));
335  VIXL_CHECK(CPURegister(1, CPURegister::kUnknownSize, CPURegister::kPRegister)
336                 .Is(p1));
337}
338
339
340#ifdef __aarch64__
// Check that a CPURegister argument is passed to a function in a single
// machine register, by generating and calling a function that simply returns
// its second argument.
static void CPURegisterByValueHelper(CPURegister reg) {
  // Test that `reg` can be passed in one register. We'd like to use
  // __attribute__((naked)) for this, but it isn't supported for AArch64, so
  // generate a function using VIXL instead.

  MacroAssembler masm;
  // The generated function implements:
  //   CPURegister fn(int placeholder, CPURegister reg);
  // NOTE(review): this assumes AAPCS64 passes `placeholder` in x0 and `reg`
  // in x1 — which only holds if CPURegister fits in a register (checked by
  // the VIXL_STATIC_ASSERT in TEST(cpureg_by_value)).
  // Move `reg` to its result register.
  __ Mov(x0, x1);
  // Clobber all other result registers, so that any accidental reliance on
  // them is caught by the Is() check below.
  __ Mov(x1, 0xfffffffffffffff1);
  __ Mov(x2, 0xfffffffffffffff2);
  __ Mov(x3, 0xfffffffffffffff3);
  __ Mov(x4, 0xfffffffffffffff4);
  __ Mov(x5, 0xfffffffffffffff5);
  __ Mov(x6, 0xfffffffffffffff6);
  __ Mov(x7, 0xfffffffffffffff7);
  __ Ret();
  masm.FinalizeCode();

  // Make the generated buffer executable, then call it directly.
  CodeBuffer* buffer = masm.GetBuffer();
  auto fn = buffer->GetStartAddress<CPURegister (*)(int, CPURegister)>();
  buffer->SetExecutable();
  CPURegister out = fn(42, reg);

  // The register must round-trip unchanged.
  VIXL_CHECK(out.Is(reg));
}
368
369
370TEST(cpureg_by_value) {
371  VIXL_STATIC_ASSERT(sizeof(CPURegister) <= sizeof(void*));
372  // Check some arbitrary registers to try to exercise each encoding field.
373  CPURegisterByValueHelper(x0);
374  CPURegisterByValueHelper(v31.V8H());
375  CPURegisterByValueHelper(z16.VnD());
376  CPURegisterByValueHelper(p15.Merging());
377}
378#endif  // __aarch64__
379
380
TEST(isvalid) {
  // The "no register" sentinels are never valid.
  VIXL_CHECK(!NoReg.IsValid());
  VIXL_CHECK(!NoVReg.IsValid());
  VIXL_CHECK(!NoCPUReg.IsValid());

  // All named registers are valid, including the zero register and the stack
  // pointer views.
  VIXL_CHECK(x0.IsValid());
  VIXL_CHECK(w0.IsValid());
  VIXL_CHECK(x30.IsValid());
  VIXL_CHECK(w30.IsValid());
  VIXL_CHECK(xzr.IsValid());
  VIXL_CHECK(wzr.IsValid());

  VIXL_CHECK(sp.IsValid());
  VIXL_CHECK(wsp.IsValid());

  VIXL_CHECK(d0.IsValid());
  VIXL_CHECK(s0.IsValid());
  VIXL_CHECK(d31.IsValid());
  VIXL_CHECK(s31.IsValid());

  // General-purpose registers are valid only as plain registers; they are
  // neither V (NEON) registers nor scalar FP registers.
  VIXL_CHECK(x0.IsValidRegister());
  VIXL_CHECK(w0.IsValidRegister());
  VIXL_CHECK(xzr.IsValidRegister());
  VIXL_CHECK(wzr.IsValidRegister());
  VIXL_CHECK(sp.IsValidRegister());
  VIXL_CHECK(wsp.IsValidRegister());
  VIXL_CHECK(!x0.IsValidVRegister());
  VIXL_CHECK(!w0.IsValidVRegister());
  VIXL_CHECK(!xzr.IsValidVRegister());
  VIXL_CHECK(!wzr.IsValidVRegister());
  VIXL_CHECK(!sp.IsValidVRegister());
  VIXL_CHECK(!wsp.IsValidVRegister());
  VIXL_CHECK(!x0.IsValidFPRegister());
  VIXL_CHECK(!w0.IsValidFPRegister());
  VIXL_CHECK(!xzr.IsValidFPRegister());
  VIXL_CHECK(!wzr.IsValidFPRegister());
  VIXL_CHECK(!sp.IsValidFPRegister());
  VIXL_CHECK(!wsp.IsValidFPRegister());

  // Q registers are V registers, but not FP registers (there is no Q-sized
  // scalar FP type).
  VIXL_CHECK(q0.IsValidVRegister());
  VIXL_CHECK(!q0.IsValidFPRegister());
  VIXL_CHECK(!q0.IsValidRegister());

  // D, S and H registers are both V registers and scalar FP registers.
  VIXL_CHECK(d0.IsValidVRegister());
  VIXL_CHECK(d0.IsValidFPRegister());
  VIXL_CHECK(!d0.IsValidRegister());

  VIXL_CHECK(s0.IsValidVRegister());
  VIXL_CHECK(s0.IsValidFPRegister());
  VIXL_CHECK(!s0.IsValidRegister());

  VIXL_CHECK(h0.IsValidVRegister());
  VIXL_CHECK(h0.IsValidFPRegister());
  VIXL_CHECK(!h0.IsValidRegister());

  // B registers are V registers but not FP registers (no B-sized FP type).
  VIXL_CHECK(b0.IsValidVRegister());
  VIXL_CHECK(!b0.IsValidFPRegister());
  VIXL_CHECK(!b0.IsValidRegister());

  // IsValidFPRegister() is only true for scalar types.
  VIXL_CHECK(q0.V2D().IsValidVRegister());
  VIXL_CHECK(!q0.V2D().IsValidFPRegister());
  VIXL_CHECK(d0.V2S().IsValidVRegister());
  VIXL_CHECK(!d0.V2S().IsValidFPRegister());
  VIXL_CHECK(s0.V2H().IsValidVRegister());
  VIXL_CHECK(!s0.V2H().IsValidFPRegister());
}
448
449
TEST(isvalid_cpu) {
  // As 'isvalid', but using CPURegister types where possible. This shouldn't
  // make any difference.
  // Each check deliberately goes through a static_cast<CPURegister> so that
  // the CPURegister implementations of the predicates are exercised, rather
  // than any subclass overloads.
  VIXL_CHECK(!static_cast<CPURegister>(NoReg).IsValid());
  VIXL_CHECK(!static_cast<CPURegister>(NoVReg).IsValid());
  VIXL_CHECK(!static_cast<CPURegister>(NoCPUReg).IsValid());

  VIXL_CHECK(static_cast<CPURegister>(x0).IsValid());
  VIXL_CHECK(static_cast<CPURegister>(w0).IsValid());
  VIXL_CHECK(static_cast<CPURegister>(x30).IsValid());
  VIXL_CHECK(static_cast<CPURegister>(w30).IsValid());
  VIXL_CHECK(static_cast<CPURegister>(xzr).IsValid());
  VIXL_CHECK(static_cast<CPURegister>(wzr).IsValid());

  VIXL_CHECK(static_cast<CPURegister>(sp).IsValid());
  VIXL_CHECK(static_cast<CPURegister>(wsp).IsValid());

  VIXL_CHECK(static_cast<CPURegister>(d0).IsValid());
  VIXL_CHECK(static_cast<CPURegister>(s0).IsValid());
  VIXL_CHECK(static_cast<CPURegister>(d31).IsValid());
  VIXL_CHECK(static_cast<CPURegister>(s31).IsValid());

  // General-purpose registers: valid as registers only.
  VIXL_CHECK(static_cast<CPURegister>(x0).IsValidRegister());
  VIXL_CHECK(static_cast<CPURegister>(w0).IsValidRegister());
  VIXL_CHECK(static_cast<CPURegister>(xzr).IsValidRegister());
  VIXL_CHECK(static_cast<CPURegister>(wzr).IsValidRegister());
  VIXL_CHECK(static_cast<CPURegister>(sp).IsValidRegister());
  VIXL_CHECK(static_cast<CPURegister>(wsp).IsValidRegister());
  VIXL_CHECK(!static_cast<CPURegister>(x0).IsValidVRegister());
  VIXL_CHECK(!static_cast<CPURegister>(w0).IsValidVRegister());
  VIXL_CHECK(!static_cast<CPURegister>(xzr).IsValidVRegister());
  VIXL_CHECK(!static_cast<CPURegister>(wzr).IsValidVRegister());
  VIXL_CHECK(!static_cast<CPURegister>(sp).IsValidVRegister());
  VIXL_CHECK(!static_cast<CPURegister>(wsp).IsValidVRegister());
  VIXL_CHECK(!static_cast<CPURegister>(x0).IsValidFPRegister());
  VIXL_CHECK(!static_cast<CPURegister>(w0).IsValidFPRegister());
  VIXL_CHECK(!static_cast<CPURegister>(xzr).IsValidFPRegister());
  VIXL_CHECK(!static_cast<CPURegister>(wzr).IsValidFPRegister());
  VIXL_CHECK(!static_cast<CPURegister>(sp).IsValidFPRegister());
  VIXL_CHECK(!static_cast<CPURegister>(wsp).IsValidFPRegister());

  // Q registers: V registers but not scalar FP registers.
  VIXL_CHECK(static_cast<CPURegister>(q0).IsValidVRegister());
  VIXL_CHECK(!static_cast<CPURegister>(q0).IsValidFPRegister());
  VIXL_CHECK(!static_cast<CPURegister>(q0).IsValidRegister());

  // D, S and H registers: both V registers and scalar FP registers.
  VIXL_CHECK(static_cast<CPURegister>(d0).IsValidVRegister());
  VIXL_CHECK(static_cast<CPURegister>(d0).IsValidFPRegister());
  VIXL_CHECK(!static_cast<CPURegister>(d0).IsValidRegister());

  VIXL_CHECK(static_cast<CPURegister>(s0).IsValidVRegister());
  VIXL_CHECK(static_cast<CPURegister>(s0).IsValidFPRegister());
  VIXL_CHECK(!static_cast<CPURegister>(s0).IsValidRegister());

  VIXL_CHECK(static_cast<CPURegister>(h0).IsValidVRegister());
  VIXL_CHECK(static_cast<CPURegister>(h0).IsValidFPRegister());
  VIXL_CHECK(!static_cast<CPURegister>(h0).IsValidRegister());

  // B registers: V registers but not scalar FP registers.
  VIXL_CHECK(static_cast<CPURegister>(b0).IsValidVRegister());
  VIXL_CHECK(!static_cast<CPURegister>(b0).IsValidFPRegister());
  VIXL_CHECK(!static_cast<CPURegister>(b0).IsValidRegister());
}
511
512
TEST(are_consecutive) {
  // AreConsecutive checks that each register's code follows the previous
  // one's. Register codes wrap, so (d31, d0) counts as consecutive. The
  // passing cases in the final group below show that checking stops at the
  // first NoVReg argument: registers after a NoVReg are not examined.
  VIXL_CHECK(AreConsecutive(b0, NoVReg));
  VIXL_CHECK(AreConsecutive(b1, b2));
  VIXL_CHECK(AreConsecutive(b3, b4, b5));
  VIXL_CHECK(AreConsecutive(b6, b7, b8, b9));
  VIXL_CHECK(AreConsecutive(h10, NoVReg));
  VIXL_CHECK(AreConsecutive(h11, h12));
  VIXL_CHECK(AreConsecutive(h13, h14, h15));
  VIXL_CHECK(AreConsecutive(h16, h17, h18, h19));
  VIXL_CHECK(AreConsecutive(s20, NoVReg));
  VIXL_CHECK(AreConsecutive(s21, s22));
  VIXL_CHECK(AreConsecutive(s23, s24, s25));
  VIXL_CHECK(AreConsecutive(s26, s27, s28, s29));
  VIXL_CHECK(AreConsecutive(d30, NoVReg));
  VIXL_CHECK(AreConsecutive(d31, d0));  // Codes wrap from 31 back to 0.
  VIXL_CHECK(AreConsecutive(d1, d2, d3));
  VIXL_CHECK(AreConsecutive(d4, d5, d6, d7));
  VIXL_CHECK(AreConsecutive(q8, NoVReg));
  VIXL_CHECK(AreConsecutive(q9, q10));
  VIXL_CHECK(AreConsecutive(q11, q12, q13));
  VIXL_CHECK(AreConsecutive(q14, q15, q16, q17));
  VIXL_CHECK(AreConsecutive(v18, NoVReg));
  VIXL_CHECK(AreConsecutive(v19, v20));
  VIXL_CHECK(AreConsecutive(v21, v22, v23));
  VIXL_CHECK(AreConsecutive(v24, v25, v26, v27));
  // Mixed register sizes: only the codes matter.
  VIXL_CHECK(AreConsecutive(b29, h30));
  VIXL_CHECK(AreConsecutive(s31, d0, q1));
  VIXL_CHECK(AreConsecutive(v2, b3, h4, s5));

  // Non-consecutive (or repeated) codes fail.
  VIXL_CHECK(!AreConsecutive(b0, b2));
  VIXL_CHECK(!AreConsecutive(h1, h0));
  VIXL_CHECK(!AreConsecutive(s31, s1));
  VIXL_CHECK(!AreConsecutive(d12, d12));
  VIXL_CHECK(!AreConsecutive(q31, q1));

  // A break _before_ the first NoVReg is still detected.
  VIXL_CHECK(!AreConsecutive(b0, b1, b3));
  VIXL_CHECK(!AreConsecutive(h4, h5, h6, h6));
  VIXL_CHECK(!AreConsecutive(d11, d13, NoVReg, d14));
  VIXL_CHECK(!AreConsecutive(d15, d16, d18, NoVReg));
  VIXL_CHECK(!AreConsecutive(b26, b28, NoVReg, b29));
  VIXL_CHECK(!AreConsecutive(s28, s30, NoVReg, NoVReg));

  // Anything after the first NoVReg is ignored, even non-consecutive
  // registers.
  VIXL_CHECK(AreConsecutive(q19, NoVReg, NoVReg, q22));
  VIXL_CHECK(AreConsecutive(v23, NoVReg, v25, NoVReg));
  VIXL_CHECK(AreConsecutive(b26, b27, NoVReg, NoVReg));
  VIXL_CHECK(AreConsecutive(h28, NoVReg, NoVReg, NoVReg));
  VIXL_CHECK(AreConsecutive(s30, s31, NoVReg, s2));
  VIXL_CHECK(AreConsecutive(d3, NoVReg, d6, d7));
}
562
563
564TEST(sve_p_registers) {
565  enum Qualification { kNone, kZeroing, kMerging, kWithLaneSize };
566  class Helper {
567   public:
568    static Qualification GetQualification(PRegister) { return kNone; }
569    static Qualification GetQualification(PRegisterZ) { return kZeroing; }
570    static Qualification GetQualification(PRegisterM) { return kMerging; }
571    static Qualification GetQualification(PRegisterWithLaneSize) {
572      return kWithLaneSize;
573    }
574  };
575
576  VIXL_CHECK(kNumberOfPRegisters == 16);
577  VIXL_CHECK(p0.GetCode() == 0);
578  VIXL_CHECK(p15.GetCode() == 15);
579  VIXL_CHECK(p14.VnB().GetLaneSizeInBits() == kBRegSize);
580  VIXL_CHECK(p14.VnH().GetLaneSizeInBits() == kHRegSize);
581  VIXL_CHECK(p14.VnS().GetLaneSizeInBits() == kSRegSize);
582  VIXL_CHECK(p14.VnD().GetLaneSizeInBits() == kDRegSize);
583  VIXL_CHECK(p14.VnB().GetLaneSizeInBytes() == kBRegSizeInBytes);
584  VIXL_CHECK(p14.VnH().GetLaneSizeInBytes() == kHRegSizeInBytes);
585  VIXL_CHECK(p14.VnS().GetLaneSizeInBytes() == kSRegSizeInBytes);
586  VIXL_CHECK(p14.VnD().GetLaneSizeInBytes() == kDRegSizeInBytes);
587  VIXL_CHECK(Helper::GetQualification(p1) == kNone);
588  VIXL_CHECK(Helper::GetQualification(p2.Zeroing()) == kZeroing);
589  VIXL_CHECK(Helper::GetQualification(p3.Merging()) == kMerging);
590  VIXL_CHECK(Helper::GetQualification(p4.VnB()) == kWithLaneSize);
591  VIXL_CHECK(Helper::GetQualification(p5.VnH()) == kWithLaneSize);
592  VIXL_CHECK(Helper::GetQualification(p6.VnS()) == kWithLaneSize);
593  VIXL_CHECK(Helper::GetQualification(p7.VnD()) == kWithLaneSize);
594}
595
596
597TEST(sve_z_registers) {
598  VIXL_CHECK(z0.GetCode() == 0);
599  VIXL_CHECK(z31.GetCode() == 31);
600
601  VIXL_CHECK(z0.Is(z0));
602  VIXL_CHECK(!z0.Is(z1));
603  VIXL_CHECK(!z0.Is(v0));
604  VIXL_CHECK(!z0.Is(b0));
605  VIXL_CHECK(!z0.Is(q0));
606
607  VIXL_CHECK(AreAliased(z5, z5));
608  VIXL_CHECK(AreAliased(z5, b5));
609  VIXL_CHECK(AreAliased(b5, z5));
610  VIXL_CHECK(AreAliased(z5, z5.B()));
611  VIXL_CHECK(AreAliased(z5, z5.VnB()));
612
613  VIXL_CHECK(!AreAliased(z6, z7));
614  VIXL_CHECK(!AreAliased(b6, z7));
615  VIXL_CHECK(!AreAliased(x7, z7));
616}
617
618
619TEST(sve_z_registers_vs_neon) {
620  // There are three related register variants to consider in VIXL's API:
621  //
622  //    "b0": NEON: The least-significant byte of v0.
623  //    "v0.B": NEON: v0, with an unspecified number of byte-sized lanes.
624  //    "z0.B": SVE: z0, with an unspecified number of byte-sized lanes.
625  //
626  // The first two cases are indistinguishable in VIXL; both are obtained using
627  // something like `v0.B()`. This is fine for NEON because there is no
628  // ambiguity in practice; the "v0.B" form is always used with an index that
629  // makes the meaning clear.
630
631  VIXL_ASSERT(v6.B().Is(b6));
632  VIXL_ASSERT(v7.H().Is(h7));
633  VIXL_ASSERT(v8.S().Is(s8));
634  VIXL_ASSERT(v9.D().Is(d9));
635
636  VIXL_ASSERT(z6.B().Is(b6));
637  VIXL_ASSERT(z7.H().Is(h7));
638  VIXL_ASSERT(z8.S().Is(s8));
639  VIXL_ASSERT(z9.D().Is(d9));
640
641  // We cannot use the same approach for SVE's "z0.B" because, for example,
642  // `Add(VRegister, ...)` and `Add(ZRegister, ...)` generate different
643  // instructions.
644
645  // Test that the variants can be distinguished with `Is`.
646  VIXL_CHECK(!z6.VnB().Is(b6));
647  VIXL_CHECK(!z7.VnH().Is(h7));
648  VIXL_CHECK(!z8.VnS().Is(s8));
649  VIXL_CHECK(!z9.VnD().Is(d9));
650
651  VIXL_CHECK(!z6.VnB().Is(v6.B()));
652  VIXL_CHECK(!z7.VnH().Is(v7.H()));
653  VIXL_CHECK(!z8.VnS().Is(v8.S()));
654  VIXL_CHECK(!z9.VnD().Is(v9.D()));
655
656  VIXL_CHECK(!z6.VnB().Is(z6.B()));
657  VIXL_CHECK(!z7.VnH().Is(z7.H()));
658  VIXL_CHECK(!z8.VnS().Is(z8.S()));
659  VIXL_CHECK(!z9.VnD().Is(z9.D()));
660
661  // Test that the variants can be distinguished at compile-time using
662  // overloading. VIXL's API relies on this.
663  enum Variant { kNEON, kSVE, kUnknown };
664  class Helper {
665   public:
666    static Variant GetVariant(ZRegister) { return kSVE; }
667    static Variant GetVariant(VRegister) { return kNEON; }
668    static Variant GetVariant(CPURegister) { return kUnknown; }
669  };
670  VIXL_CHECK(Helper::GetVariant(z10.VnB()) == kSVE);
671  VIXL_CHECK(Helper::GetVariant(z11.VnH()) == kSVE);
672  VIXL_CHECK(Helper::GetVariant(z12.VnS()) == kSVE);
673  VIXL_CHECK(Helper::GetVariant(z13.VnD()) == kSVE);
674
675  VIXL_CHECK(Helper::GetVariant(v10.B()) == kNEON);
676  VIXL_CHECK(Helper::GetVariant(v11.H()) == kNEON);
677  VIXL_CHECK(Helper::GetVariant(v12.S()) == kNEON);
678  VIXL_CHECK(Helper::GetVariant(v13.D()) == kNEON);
679
680  VIXL_CHECK(Helper::GetVariant(v10.V16B()) == kNEON);
681  VIXL_CHECK(Helper::GetVariant(v11.V8H()) == kNEON);
682  VIXL_CHECK(Helper::GetVariant(v12.V4S()) == kNEON);
683  VIXL_CHECK(Helper::GetVariant(v13.V2D()) == kNEON);
684
685  VIXL_CHECK(Helper::GetVariant(b10) == kNEON);
686  VIXL_CHECK(Helper::GetVariant(h11) == kNEON);
687  VIXL_CHECK(Helper::GetVariant(s12) == kNEON);
688  VIXL_CHECK(Helper::GetVariant(d13) == kNEON);
689}
690
691
692TEST(move_immediate_helpers) {
693  // Using these helpers to query information (without generating code) should
694  // not crash.
695  MacroAssembler::MoveImmediateHelper(NULL, x0, 0x12345678);
696  MacroAssembler::OneInstrMoveImmediateHelper(NULL, x1, 0xabcdef);
697}
698
699
700TEST(generic_operand_helpers) {
701  GenericOperand invalid_1;
702  GenericOperand invalid_2;
703  GenericOperand reg(x3);
704  GenericOperand mem(MemOperand(sp, 8), kXRegSizeInBytes);
705
706  VIXL_CHECK(!invalid_1.IsValid());
707  VIXL_CHECK(!invalid_2.IsValid());
708
709  VIXL_CHECK(invalid_1.Equals(invalid_1));
710  VIXL_CHECK(invalid_2.Equals(invalid_2));
711  VIXL_CHECK(reg.Equals(reg));
712  VIXL_CHECK(mem.Equals(mem));
713
714  VIXL_CHECK(invalid_1.Equals(invalid_2));
715  VIXL_CHECK(invalid_2.Equals(invalid_1));
716
717  VIXL_CHECK(!invalid_1.Equals(reg));
718  VIXL_CHECK(!invalid_1.Equals(mem));
719  VIXL_CHECK(!reg.Equals(invalid_1));
720  VIXL_CHECK(!reg.Equals(invalid_2));
721  VIXL_CHECK(!reg.Equals(mem));
722  VIXL_CHECK(!mem.Equals(invalid_1));
723  VIXL_CHECK(!mem.Equals(reg));
724}
725
726
TEST(integer_operand_is) {
  // IsZero: true only for exactly zero.
  VIXL_CHECK(IntegerOperand(0).IsZero());
  VIXL_CHECK(!IntegerOperand(1).IsZero());
  VIXL_CHECK(!IntegerOperand(-1).IsZero());

  // IsIntN(8): the signed 8-bit range [-0x80, 0x7f].
  VIXL_CHECK(!IntegerOperand(-0x81).IsIntN(8));
  VIXL_CHECK(IntegerOperand(-0x80).IsIntN(8));
  VIXL_CHECK(IntegerOperand(-1).IsIntN(8));
  VIXL_CHECK(IntegerOperand(0).IsIntN(8));
  VIXL_CHECK(IntegerOperand(1).IsIntN(8));
  VIXL_CHECK(IntegerOperand(0x7f).IsIntN(8));
  VIXL_CHECK(!IntegerOperand(0x80).IsIntN(8));

  // IsUintN(8): the unsigned 8-bit range [0, 0xff].
  VIXL_CHECK(!IntegerOperand(-1).IsUintN(8));
  VIXL_CHECK(IntegerOperand(0).IsUintN(8));
  VIXL_CHECK(IntegerOperand(1).IsUintN(8));
  VIXL_CHECK(IntegerOperand(0xff).IsUintN(8));
  VIXL_CHECK(!IntegerOperand(0x100).IsUintN(8));

  // Full-width boundaries. Note that 0x8000000000000000 as a plain literal is
  // an unsigned value, so it is out of the signed 64-bit range.
  VIXL_CHECK(IntegerOperand(INT64_MIN).IsIntN(64));
  VIXL_CHECK(IntegerOperand(0).IsIntN(64));
  VIXL_CHECK(IntegerOperand(INT64_MAX).IsIntN(64));
  VIXL_CHECK(!IntegerOperand(0x8000000000000000).IsIntN(64));

  VIXL_CHECK(!IntegerOperand(-1).IsUintN(64));
  VIXL_CHECK(IntegerOperand(0).IsUintN(64));
  VIXL_CHECK(IntegerOperand(UINT64_MAX).IsUintN(64));

  // FitsInBits(12): representable in 12 bits as either a signed or an
  // unsigned value, i.e. [-0x800, 0xfff].
  VIXL_CHECK(!IntegerOperand(-0x801).FitsInBits(12));
  VIXL_CHECK(IntegerOperand(-0x800).FitsInBits(12));
  VIXL_CHECK(IntegerOperand(0).FitsInBits(12));
  VIXL_CHECK(IntegerOperand(0x7ff).FitsInBits(12));
  VIXL_CHECK(IntegerOperand(0x800).FitsInBits(12));
  VIXL_CHECK(IntegerOperand(0xfff).FitsInBits(12));
  VIXL_CHECK(!IntegerOperand(0x1000).FitsInBits(12));

  // FitsInLane for an H-sized (16-bit) lane: [-0x8000, 0xffff].
  VIXL_CHECK(!IntegerOperand(-0x8001).FitsInLane(z0.VnH()));
  VIXL_CHECK(IntegerOperand(-0x8000).FitsInLane(z0.VnH()));
  VIXL_CHECK(IntegerOperand(0).FitsInLane(z0.VnH()));
  VIXL_CHECK(IntegerOperand(0x7fff).FitsInLane(z0.VnH()));
  VIXL_CHECK(IntegerOperand(0x8000).FitsInLane(z0.VnH()));
  VIXL_CHECK(IntegerOperand(0xffff).FitsInLane(z0.VnH()));
  VIXL_CHECK(!IntegerOperand(0x10000).FitsInLane(z0.VnH()));
}
771
TEST(integer_operand_as_uint) {
  // AsUintN(n) reinterprets the low n bits of the operand as an unsigned
  // value (negative inputs become their two's-complement encoding).

  // Simple cases.
  VIXL_CHECK(IntegerOperand(1).AsUintN(8) == 1);
  VIXL_CHECK(IntegerOperand(1).AsUintN(16) == 1);
  VIXL_CHECK(IntegerOperand(1).AsUintN(32) == 1);
  VIXL_CHECK(IntegerOperand(1).AsUintN(64) == 1);
  VIXL_CHECK(IntegerOperand(-1).AsUintN(8) == 0xff);
  VIXL_CHECK(IntegerOperand(-1).AsUintN(16) == 0xffff);
  VIXL_CHECK(IntegerOperand(-1).AsUintN(32) == 0xffffffff);
  VIXL_CHECK(IntegerOperand(-1).AsUintN(64) == 0xffffffffffffffff);
  VIXL_CHECK(IntegerOperand(0xf0).AsUintN(8) == 0xf0);
  VIXL_CHECK(IntegerOperand(0xf420).AsUintN(16) == 0xf420);
  VIXL_CHECK(IntegerOperand(0xf4242420).AsUintN(32) == 0xf4242420);
  VIXL_CHECK(IntegerOperand(0xf424242424242420).AsUintN(64) ==
             0xf424242424242420);

  // Boundary conditions for known-size types.
  VIXL_CHECK(IntegerOperand(INT8_MIN).AsUintN(8) == 0x80);
  VIXL_CHECK(IntegerOperand(INT8_MAX).AsUintN(8) == 0x7f);
  VIXL_CHECK(IntegerOperand(UINT8_MAX).AsUintN(8) == 0xff);

  VIXL_CHECK(IntegerOperand(INT16_MIN).AsUintN(16) == 0x8000);
  VIXL_CHECK(IntegerOperand(INT16_MAX).AsUintN(16) == 0x7fff);
  VIXL_CHECK(IntegerOperand(UINT16_MAX).AsUintN(16) == 0xffff);

  VIXL_CHECK(IntegerOperand(INT32_MIN).AsUintN(32) == 0x80000000);
  VIXL_CHECK(IntegerOperand(INT32_MAX).AsUintN(32) == 0x7fffffff);
  VIXL_CHECK(IntegerOperand(UINT32_MAX).AsUintN(32) == 0xffffffff);

  VIXL_CHECK(IntegerOperand(INT64_MIN).AsUintN(64) == 0x8000000000000000);
  VIXL_CHECK(IntegerOperand(INT64_MAX).AsUintN(64) == 0x7fffffffffffffff);
  VIXL_CHECK(IntegerOperand(UINT64_MAX).AsUintN(64) == 0xffffffffffffffff);
}
805
TEST(integer_operand_as_int) {
  // Check that IntegerOperand::AsIntN(N) interprets the wrapped value as an
  // N-bit two's-complement signed integer.

  // Simple cases.
  VIXL_CHECK(IntegerOperand(1).AsIntN(8) == 1);
  VIXL_CHECK(IntegerOperand(1).AsIntN(16) == 1);
  VIXL_CHECK(IntegerOperand(1).AsIntN(32) == 1);
  VIXL_CHECK(IntegerOperand(1).AsIntN(64) == 1);
  VIXL_CHECK(IntegerOperand(-1).AsIntN(8) == -1);
  VIXL_CHECK(IntegerOperand(-1).AsIntN(16) == -1);
  VIXL_CHECK(IntegerOperand(-1).AsIntN(32) == -1);
  VIXL_CHECK(IntegerOperand(-1).AsIntN(64) == -1);
  VIXL_CHECK(IntegerOperand(0x70).AsIntN(8) == 0x70);
  VIXL_CHECK(IntegerOperand(0x7420).AsIntN(16) == 0x7420);
  VIXL_CHECK(IntegerOperand(0x74242420).AsIntN(32) == 0x74242420);
  VIXL_CHECK(IntegerOperand(0x7424242424242420).AsIntN(64) ==
             0x7424242424242420);

  // Boundary conditions for known-size types.
  // All-ones bit patterns read back as -1 at every width.
  VIXL_CHECK(IntegerOperand(UINT8_MAX).AsIntN(8) == -1);
  VIXL_CHECK(IntegerOperand(UINT16_MAX).AsIntN(16) == -1);
  VIXL_CHECK(IntegerOperand(UINT32_MAX).AsIntN(32) == -1);
  VIXL_CHECK(IntegerOperand(UINT64_MAX).AsIntN(64) == -1);

  VIXL_CHECK(IntegerOperand(INT8_MAX).AsIntN(8) == INT8_MAX);
  VIXL_CHECK(IntegerOperand(INT16_MAX).AsIntN(16) == INT16_MAX);
  VIXL_CHECK(IntegerOperand(INT32_MAX).AsIntN(32) == INT32_MAX);
  VIXL_CHECK(IntegerOperand(INT64_MAX).AsIntN(64) == INT64_MAX);

  // Unsigned bit patterns with the sign bit set wrap to the minimum value.
  VIXL_CHECK(IntegerOperand(0x80).AsIntN(8) == INT8_MIN);
  VIXL_CHECK(IntegerOperand(0x8000).AsIntN(16) == INT16_MIN);
  VIXL_CHECK(IntegerOperand(0x80000000).AsIntN(32) == INT32_MIN);
  VIXL_CHECK(IntegerOperand(0x8000000000000000).AsIntN(64) == INT64_MIN);
}
838
839template <unsigned N>
840class IntegerOperandTryEncodeShiftedIntHelper {
841 public:
842  IntegerOperandTryEncodeShiftedIntHelper() {}
843
844  template <unsigned kShift, typename T>
845  void TestEncodable(T value, const ZRegister& zd, int64_t expected_imm) {
846    VIXL_CHECK(TestImpl<kShift>(value, zd, expected_imm));
847  }
848
849  template <unsigned kShift, typename T>
850  void TestUnencodable(T value, const ZRegister& zd) {
851    // The `expected_imm` value is ignored, so its value is arbitrary.
852    VIXL_CHECK(!TestImpl<kShift>(value, zd, 0));
853  }
854
855 private:
856  template <unsigned kShift, typename T>
857  bool TestImpl(T value, const ZRegister& zd, int64_t expected_imm) {
858    IntegerOperand operand(value);
859    int64_t imm = 0xdeadbeef42;
860    unsigned shift = 0xbeef43;
861    bool success =
862        operand.TryEncodeAsShiftedIntNForLane<N, kShift>(zd, &imm, &shift);
863    if (success) {
864      VIXL_CHECK(imm == expected_imm);
865      VIXL_CHECK(shift == kShift);
866    } else {
867      // Check that the outputs were unmodified.
868      VIXL_CHECK(imm == 0xdeadbeef42);
869      VIXL_CHECK(shift == 0xbeef43);
870    }
871
872    // If kShift is 0, also check TryEncodeAsIntNForLane.
873    if (kShift == 0) {
874      int64_t unshifted_imm = 0xdeadbeef99;
875      bool unshifted_success =
876          operand.TryEncodeAsIntNForLane<N>(zd, &unshifted_imm);
877
878      VIXL_CHECK(unshifted_success == success);
879      if (unshifted_success) {
880        VIXL_CHECK(unshifted_imm == expected_imm);
881      } else {
882        VIXL_CHECK(unshifted_imm == 0xdeadbeef99);
883      }
884    }
885
886    return success;
887  }
888};
889
890TEST(integer_operand_encode_as_intn) {
891  IntegerOperandTryEncodeShiftedIntHelper<4> int4_helper;
892  IntegerOperandTryEncodeShiftedIntHelper<8> int8_helper;
893  IntegerOperandTryEncodeShiftedIntHelper<12> int12_helper;
894
895  // Simple cases, where the value is directly encodable.
896  int4_helper.TestEncodable<0>(-8, z0.VnH(), -8);
897  int4_helper.TestEncodable<0>(-7, z0.VnH(), -7);
898  int4_helper.TestEncodable<0>(-1, z0.VnS(), -1);
899  int4_helper.TestEncodable<0>(0, z0.VnD(), 0);
900  int4_helper.TestEncodable<0>(1, z0.VnB(), 1);
901  int4_helper.TestEncodable<0>(7, z0.VnH(), 7);
902
903  int8_helper.TestEncodable<0>(0x7f, z0.VnB(), 0x7f);
904  int8_helper.TestEncodable<0>(0x7f, z0.VnH(), 0x7f);
905  int12_helper.TestEncodable<0>(0x7ff, z0.VnH(), 0x7ff);
906
907  int8_helper.TestEncodable<0>(-0x80, z0.VnB(), -0x80);
908  int8_helper.TestEncodable<0>(-0x80, z0.VnH(), -0x80);
909  int12_helper.TestEncodable<0>(-0x800, z0.VnH(), -0x800);
910
911  // Cases that are directly encodable with a shift.
912  int8_helper.TestEncodable<4>(-0x800, z0.VnH(), -0x80);
913  int8_helper.TestEncodable<4>(-0x7f0, z0.VnH(), -0x7f);
914  int8_helper.TestEncodable<4>(-0x010, z0.VnH(), -1);
915  int8_helper.TestEncodable<4>(0x000, z0.VnH(), 0);
916  int8_helper.TestEncodable<4>(0x010, z0.VnH(), 1);
917  int8_helper.TestEncodable<4>(0x7f0, z0.VnH(), 0x7f);
918
919  // Ensure that (positive) bit representations of negative values are treated
920  // as negative values, even though their arithmetic values are unencodable.
921  int12_helper.TestEncodable<0>(0xffd6, z0.VnH(), -42);
922  int12_helper.TestEncodable<0>(0xffffffd6, z0.VnS(), -42);
923  int12_helper.TestEncodable<4>(0xfd60, z0.VnH(), -42);
924  int12_helper.TestEncodable<8>(0xffffd600, z0.VnS(), -42);
925
926  int8_helper.TestEncodable<0>(UINT8_MAX, z0.VnB(), -1);
927  int8_helper.TestEncodable<0>(UINT16_MAX, z0.VnH(), -1);
928  int8_helper.TestEncodable<0>(UINT32_MAX, z0.VnS(), -1);
929  int8_helper.TestEncodable<0>(UINT64_MAX, z0.VnD(), -1);
930
931  int4_helper.TestEncodable<1>(UINT8_MAX ^ 0x1, z0.VnB(), -1);
932  int4_helper.TestEncodable<2>(UINT16_MAX ^ 0x3, z0.VnH(), -1);
933  int4_helper.TestEncodable<3>(UINT32_MAX ^ 0x7, z0.VnS(), -1);
934  int4_helper.TestEncodable<4>(UINT64_MAX ^ 0xf, z0.VnD(), -1);
935
936  // Unencodable cases.
937  int8_helper.TestUnencodable<0>(INT16_MAX, z0.VnH());
938  int8_helper.TestUnencodable<0>(INT32_MAX, z0.VnS());
939  int8_helper.TestUnencodable<0>(INT64_MAX, z0.VnD());
940
941  int4_helper.TestUnencodable<0>(0x10, z0.VnB());
942  int4_helper.TestUnencodable<1>(0x20, z0.VnB());
943
944  int12_helper.TestUnencodable<1>(1, z0.VnD());
945  int12_helper.TestUnencodable<12>(1, z0.VnD());
946  int12_helper.TestUnencodable<12>(0x800, z0.VnD());
947}
948
949TEST(static_register_types) {
950  // [WX]Register implicitly casts to Register.
951  XRegister x_x0(0);
952  WRegister w_w0(0);
953  Register r_x0 = x_x0;
954  Register r_w0 = w_w0;
955  VIXL_CHECK(r_x0.Is(x_x0));
956  VIXL_CHECK(x_x0.Is(r_x0));
957  VIXL_CHECK(r_w0.Is(w_w0));
958  VIXL_CHECK(w_w0.Is(r_w0));
959
960  // Register explicitly casts to [WX]Register.
961  Register r_x1(1, kXRegSize);
962  Register r_w1(1, kWRegSize);
963  XRegister x_x1(r_x1);
964  WRegister w_w1(r_w1);
965  VIXL_CHECK(r_x1.Is(x_x1));
966  VIXL_CHECK(x_x1.Is(r_x1));
967  VIXL_CHECK(r_w1.Is(w_w1));
968  VIXL_CHECK(w_w1.Is(r_w1));
969
970  // [WX]Register implicitly casts to CPURegister.
971  XRegister x_x2(2);
972  WRegister w_w2(2);
973  CPURegister cpu_x2 = x_x2;
974  CPURegister cpu_w2 = w_w2;
975  VIXL_CHECK(cpu_x2.Is(x_x2));
976  VIXL_CHECK(x_x2.Is(cpu_x2));
977  VIXL_CHECK(cpu_w2.Is(w_w2));
978  VIXL_CHECK(w_w2.Is(cpu_w2));
979}
980
981
TEST(operand_is_plain_register) {
  // An Operand counts as a plain register when any attached shift or extend
  // has no effect on the value.
  VIXL_CHECK(Operand(x0).IsPlainRegister());
  VIXL_CHECK(Operand(x1, LSL, 0).IsPlainRegister());
  VIXL_CHECK(Operand(x2, LSR, 0).IsPlainRegister());
  VIXL_CHECK(Operand(x3, ASR, 0).IsPlainRegister());
  VIXL_CHECK(Operand(x4, ROR, 0).IsPlainRegister());
  // UXTX and SXTX extend an X register to its own size, so they are no-ops.
  VIXL_CHECK(Operand(x5, UXTX).IsPlainRegister());
  VIXL_CHECK(Operand(x6, SXTX).IsPlainRegister());
  VIXL_CHECK(Operand(w7).IsPlainRegister());
  VIXL_CHECK(Operand(w8, LSL, 0).IsPlainRegister());
  VIXL_CHECK(Operand(w9, LSR, 0).IsPlainRegister());
  VIXL_CHECK(Operand(w10, ASR, 0).IsPlainRegister());
  VIXL_CHECK(Operand(w11, ROR, 0).IsPlainRegister());

  // Any non-zero shift amount, or an extend narrower than the register,
  // modifies the value, so the operand is no longer a plain register.
  VIXL_CHECK(!Operand(x0, LSL, 1).IsPlainRegister());
  VIXL_CHECK(!Operand(x1, LSR, 2).IsPlainRegister());
  VIXL_CHECK(!Operand(x2, ASR, 3).IsPlainRegister());
  VIXL_CHECK(!Operand(x3, ROR, 4).IsPlainRegister());
  VIXL_CHECK(!Operand(x5, UXTX, 1).IsPlainRegister());
  VIXL_CHECK(!Operand(x6, SXTX, 2).IsPlainRegister());
  VIXL_CHECK(!Operand(w7, LSL, 1).IsPlainRegister());
  VIXL_CHECK(!Operand(w8, LSR, 2).IsPlainRegister());
  VIXL_CHECK(!Operand(w9, ASR, 3).IsPlainRegister());
  VIXL_CHECK(!Operand(w10, ROR, 4).IsPlainRegister());
  VIXL_CHECK(!Operand(w11, UXTB).IsPlainRegister());
  VIXL_CHECK(!Operand(w12, SXTB).IsPlainRegister());
  VIXL_CHECK(!Operand(w13, UXTH).IsPlainRegister());
  VIXL_CHECK(!Operand(w14, SXTH).IsPlainRegister());
  // UXTW and SXTW could be treated as plain registers in 32-bit contexts, but
  // the Operand class doesn't know the context so it has to return false.
  VIXL_CHECK(!Operand(w15, UXTW).IsPlainRegister());
  VIXL_CHECK(!Operand(w16, SXTW).IsPlainRegister());
}
1015
1016
TEST(memoperand_is_plain_register) {
  // A MemOperand is a plain register only when it is a bare base register,
  // or a base with an explicit zero immediate offset.
  VIXL_CHECK(MemOperand(x0).IsPlainRegister());
  VIXL_CHECK(MemOperand(sp).IsPlainRegister());
  VIXL_CHECK(MemOperand(x1, 0).IsPlainRegister());

  // Register offsets are not plain, even when the offset register is zr.
  VIXL_CHECK(!MemOperand(x2, xzr).IsPlainRegister());
  VIXL_CHECK(!MemOperand(x3, xzr, SXTX).IsPlainRegister());
  VIXL_CHECK(!MemOperand(x4, xzr, SXTX, 2).IsPlainRegister());
  VIXL_CHECK(!MemOperand(x5, wzr, UXTW).IsPlainRegister());
  VIXL_CHECK(!MemOperand(x6, wzr, UXTW, 3).IsPlainRegister());

  // Indexed addressing modes are not plain, even with a zero offset.
  VIXL_CHECK(!MemOperand(x7, 0, PostIndex).IsPlainRegister());
  VIXL_CHECK(!MemOperand(x8, 0, PreIndex).IsPlainRegister());
  VIXL_CHECK(!MemOperand(x9, xzr, PostIndex).IsPlainRegister());

  // Non-zero offsets are not plain.
  VIXL_CHECK(!MemOperand(x20, 1).IsPlainRegister());
  VIXL_CHECK(!MemOperand(x21, x30).IsPlainRegister());
}
1035
TEST(memoperand_is_plain_register_or_equivalent) {
  // Plain base-register forms are trivially equivalent to a plain register.
  VIXL_CHECK(MemOperand(x0).IsEquivalentToPlainRegister());
  VIXL_CHECK(MemOperand(sp).IsEquivalentToPlainRegister());
  VIXL_CHECK(MemOperand(x1, 0).IsEquivalentToPlainRegister());

  // Unlike IsPlainRegister(), zr-based register offsets also count, because
  // they address the same location as the bare base register.
  VIXL_CHECK(MemOperand(x2, xzr).IsEquivalentToPlainRegister());
  VIXL_CHECK(MemOperand(x3, xzr, SXTX).IsEquivalentToPlainRegister());
  VIXL_CHECK(MemOperand(x4, xzr, SXTX, 2).IsEquivalentToPlainRegister());
  VIXL_CHECK(MemOperand(x5, wzr, UXTW).IsEquivalentToPlainRegister());
  VIXL_CHECK(MemOperand(x6, wzr, UXTW, 3).IsEquivalentToPlainRegister());

  // Indexed forms with a zero (or zr) offset are also equivalent.
  VIXL_CHECK(MemOperand(x7, 0, PostIndex).IsEquivalentToPlainRegister());
  VIXL_CHECK(MemOperand(x8, 0, PreIndex).IsEquivalentToPlainRegister());
  VIXL_CHECK(MemOperand(x9, xzr, PostIndex).IsEquivalentToPlainRegister());

  // Non-zero offsets are never equivalent to a plain register.
  VIXL_CHECK(!MemOperand(x20, 1).IsEquivalentToPlainRegister());
  VIXL_CHECK(!MemOperand(x21, x30).IsEquivalentToPlainRegister());
}
1054
TEST(sve_memoperand_is_plain_scalar) {
  // An SVEMemOperand is a plain scalar only when it is a bare scalar base
  // register, or a scalar base with a zero immediate offset.
  VIXL_CHECK(SVEMemOperand(x0).IsPlainScalar());
  VIXL_CHECK(SVEMemOperand(sp).IsPlainScalar());
  VIXL_CHECK(SVEMemOperand(x1, 0).IsPlainScalar());

  // Register offsets are not plain, even when the offset register is zr.
  VIXL_CHECK(!SVEMemOperand(x2, xzr).IsPlainScalar());
  VIXL_CHECK(!SVEMemOperand(x4, xzr, LSL, 2).IsPlainScalar());

  // Non-zero offsets are not plain.
  VIXL_CHECK(!SVEMemOperand(x20, 1).IsPlainScalar());
  VIXL_CHECK(!SVEMemOperand(x21, x30).IsPlainScalar());

  // Forms involving a vector (as base or offset) are never plain scalars.
  VIXL_CHECK(!SVEMemOperand(x0, z1.VnD()).IsPlainScalar());
  VIXL_CHECK(!SVEMemOperand(x2, z3.VnS(), UXTW).IsPlainScalar());
  VIXL_CHECK(!SVEMemOperand(z4.VnD(), 0).IsPlainScalar());
}
1070
TEST(sve_memoperand_is_scalar_or_equivalent) {
  // Plain scalar forms are trivially equivalent to a scalar.
  VIXL_CHECK(SVEMemOperand(x0).IsEquivalentToScalar());
  VIXL_CHECK(SVEMemOperand(sp).IsEquivalentToScalar());
  VIXL_CHECK(SVEMemOperand(x1, 0).IsEquivalentToScalar());

  // Unlike IsPlainScalar(), zr-based register offsets also count, because
  // they address the same location as the bare base register.
  VIXL_CHECK(SVEMemOperand(x2, xzr).IsEquivalentToScalar());
  VIXL_CHECK(SVEMemOperand(x4, xzr, LSL, 2).IsEquivalentToScalar());

  // Non-zero offsets are not equivalent to a bare scalar.
  VIXL_CHECK(!SVEMemOperand(x20, 1).IsEquivalentToScalar());
  VIXL_CHECK(!SVEMemOperand(x21, x30).IsEquivalentToScalar());

  // Forms involving a vector are never equivalent to a scalar.
  VIXL_CHECK(!SVEMemOperand(x0, z1.VnD()).IsEquivalentToScalar());
  VIXL_CHECK(!SVEMemOperand(x2, z3.VnD(), SXTW).IsEquivalentToScalar());
  VIXL_CHECK(!SVEMemOperand(z4.VnD(), 0).IsEquivalentToScalar());
}
1086
TEST(sve_memoperand_types) {
  // Check that each SVEMemOperand constructor form is classified as the
  // expected addressing mode.

  // Scalar base plus immediate offset (optionally scaled by VL).
  VIXL_CHECK(SVEMemOperand(x0, 42).IsScalarPlusImmediate());
  VIXL_CHECK(SVEMemOperand(x1, 42, SVE_MUL_VL).IsScalarPlusImmediate());
  VIXL_CHECK(SVEMemOperand(x2, -42, SVE_MUL_VL).IsScalarPlusImmediate());

  // Scalar base plus scalar offset (optionally shifted).
  VIXL_CHECK(SVEMemOperand(sp, x3).IsScalarPlusScalar());
  VIXL_CHECK(SVEMemOperand(x4, xzr).IsScalarPlusScalar());
  VIXL_CHECK(SVEMemOperand(x5, x6, LSL, 1).IsScalarPlusScalar());

  // Scalar base plus vector of offsets (optionally extended or shifted).
  VIXL_CHECK(SVEMemOperand(x7, z0.VnD()).IsScalarPlusVector());
  VIXL_CHECK(SVEMemOperand(x8, z1.VnS(), SXTW).IsScalarPlusVector());
  VIXL_CHECK(SVEMemOperand(x9, z2.VnD(), UXTW).IsScalarPlusVector());
  VIXL_CHECK(SVEMemOperand(x10, z3.VnD(), LSL, 2).IsScalarPlusVector());

  // Vector of base addresses plus immediate offset.
  VIXL_CHECK(SVEMemOperand(z4.VnD(), 42).IsVectorPlusImmediate());
  VIXL_CHECK(SVEMemOperand(z5.VnS(), -42).IsVectorPlusImmediate());
}
1104
TEST(sve_memoperand_scatter_gather) {
  // Forms involving a vector (as base or offset) are scatter-gather; purely
  // scalar forms describe a single contiguous access.

  // Single-address accesses.
  VIXL_CHECK(!SVEMemOperand(x0, 42).IsScatterGather());
  VIXL_CHECK(!SVEMemOperand(x1, 42, SVE_MUL_VL).IsScatterGather());
  VIXL_CHECK(!SVEMemOperand(x2, -42, SVE_MUL_VL).IsScatterGather());

  VIXL_CHECK(!SVEMemOperand(sp, x3).IsScatterGather());
  VIXL_CHECK(!SVEMemOperand(x4, xzr).IsScatterGather());
  VIXL_CHECK(!SVEMemOperand(x5, x6, LSL, 1).IsScatterGather());

  // Scatter-gather accesses.
  VIXL_CHECK(SVEMemOperand(x7, z0.VnD()).IsScatterGather());
  VIXL_CHECK(SVEMemOperand(x8, z1.VnS(), SXTW).IsScatterGather());
  VIXL_CHECK(SVEMemOperand(x9, z2.VnD(), UXTW).IsScatterGather());
  VIXL_CHECK(SVEMemOperand(x10, z3.VnD(), LSL, 2).IsScatterGather());

  VIXL_CHECK(SVEMemOperand(z4.VnD(), 42).IsScatterGather());
  VIXL_CHECK(SVEMemOperand(z5.VnS(), -42).IsScatterGather());
}
1124
1125TEST(scratch_scope_basic) {
1126  MacroAssembler masm;
1127  // x16 and x17 are available as scratch registers by default.
1128  {
1129    UseScratchRegisterScope temps(&masm);
1130    Register temp1 = temps.AcquireW();
1131    Register temp2 = temps.AcquireX();
1132    VIXL_CHECK(temp1.Is(w16));
1133    VIXL_CHECK(temp2.Is(x17));
1134  }
1135  {
1136    UseScratchRegisterScope temps(&masm);
1137    Register temp1 = temps.AcquireRegisterOfSize(kXRegSize);
1138    Register temp2 = temps.AcquireRegisterOfSize(kWRegSize);
1139    VIXL_CHECK(temp1.Is(x16));
1140    VIXL_CHECK(temp2.Is(w17));
1141  }
1142}
1143
1144TEST(scratch_scope_basic_v) {
1145  MacroAssembler masm;
1146  // v31 is the only V scratch register available by default.
1147  {
1148    UseScratchRegisterScope temps(&masm);
1149    VRegister temp = temps.AcquireH();
1150    VIXL_CHECK(temp.Is(h31));
1151  }
1152  {
1153    UseScratchRegisterScope temps(&masm);
1154    VRegister temp = temps.AcquireS();
1155    VIXL_CHECK(temp.Is(s31));
1156  }
1157  {
1158    UseScratchRegisterScope temps(&masm);
1159    VRegister temp = temps.AcquireD();
1160    VIXL_CHECK(temp.Is(d31));
1161  }
1162  {
1163    UseScratchRegisterScope temps(&masm);
1164    VRegister temp = temps.AcquireVRegisterOfSize(kQRegSize);
1165    VIXL_CHECK(temp.Is(q31));
1166  }
1167  {
1168    UseScratchRegisterScope temps(&masm);
1169    VRegister temp = temps.AcquireVRegisterOfSize(kDRegSize);
1170    VIXL_CHECK(temp.Is(d31));
1171  }
1172  {
1173    UseScratchRegisterScope temps(&masm);
1174    VRegister temp = temps.AcquireVRegisterOfSize(kSRegSize);
1175    VIXL_CHECK(temp.Is(s31));
1176  }
1177}
1178
1179TEST(scratch_scope_basic_z) {
1180  MacroAssembler masm;
1181  // z31 is the only Z scratch register available by default.
1182  {
1183    UseScratchRegisterScope temps(&masm);
1184    VIXL_CHECK(temps.IsAvailable(v31));
1185    VIXL_CHECK(temps.IsAvailable(z31));
1186    ZRegister temp = temps.AcquireZ();
1187    VIXL_CHECK(temp.Is(z31));
1188    // Check that allocating a Z register properly reserves the corresponding V
1189    // register.
1190    VIXL_CHECK(!temps.IsAvailable(v31));
1191    VIXL_CHECK(!temps.IsAvailable(z31));
1192  }
1193  // Check that the destructor restored the acquired register.
1194  UseScratchRegisterScope temps(&masm);
1195  VIXL_CHECK(temps.IsAvailable(v31));
1196  VIXL_CHECK(temps.IsAvailable(z31));
1197}
1198
TEST(scratch_scope_basic_p) {
  MacroAssembler masm;
  {
    UseScratchRegisterScope temps(&masm);
    // There are no P scratch registers available by default.
    VIXL_CHECK(masm.GetScratchPRegisterList()->IsEmpty());
    // Make a mix of governing (p0-p7) and non-governing (p8-p15) registers
    // available for this test.
    temps.Include(p0, p1);
    VIXL_CHECK(temps.IsAvailable(p0));
    VIXL_CHECK(temps.IsAvailable(p1));
    temps.Include(p7, p8, p15);
    VIXL_CHECK(temps.IsAvailable(p7));
    VIXL_CHECK(temps.IsAvailable(p8));
    VIXL_CHECK(temps.IsAvailable(p15));

    // AcquireGoverningP() can only return p0-p7.
    VIXL_CHECK(temps.AcquireGoverningP().GetCode() <
               kNumberOfGoverningPRegisters);
    VIXL_CHECK(temps.AcquireGoverningP().GetCode() <
               kNumberOfGoverningPRegisters);
    // The non-governing registers must not have been consumed.
    VIXL_CHECK(temps.IsAvailable(p8));
    VIXL_CHECK(temps.IsAvailable(p15));

    // AcquireP() prefers p8-p15, ...
    VIXL_CHECK(temps.AcquireP().GetCode() >= kNumberOfGoverningPRegisters);
    VIXL_CHECK(temps.AcquireP().GetCode() >= kNumberOfGoverningPRegisters);
    // ... but will return p0-p7 if none of p8-p15 are available.
    VIXL_CHECK(temps.AcquireP().GetCode() < kNumberOfGoverningPRegisters);

    // At this point every included register has been acquired.
    VIXL_CHECK(masm.GetScratchPRegisterList()->IsEmpty());

    // Leave some registers available so we can test the destructor.
    temps.Include(p3, p6, p9, p12);
    VIXL_CHECK(!masm.GetScratchPRegisterList()->IsEmpty());
  }
  // Check that the destructor correctly cleared the list.
  VIXL_CHECK(masm.GetScratchPRegisterList()->IsEmpty());
}
1236
TEST(scratch_scope_include_ignored) {
  MacroAssembler masm;
  {
    UseScratchRegisterScope temps(&masm);
    // Start with an empty set of scratch registers.
    temps.ExcludeAll();

    // Including NoReg has no effect.
    temps.Include(NoReg);
    temps.Include(NoCPUReg);
    temps.Include(CPURegList(CPURegister::kNoRegister, 0, 0));

    // Including sp or zr has no effect, since they are never appropriate
    // scratch registers.
    temps.Include(sp);
    temps.Include(xzr, wsp);
    temps.Include(wzr);
    temps.Include(CPURegList(xzr, sp));

    // None of the Include() calls above should have made any register
    // available.
    VIXL_CHECK(masm.GetScratchRegisterList()->IsEmpty());
    VIXL_CHECK(masm.GetScratchVRegisterList()->IsEmpty());
  }
}
1260
1261class ScratchScopeHelper {
1262 public:
1263  enum Action { kRelease, kInclude, kExclude };
1264
1265  ScratchScopeHelper(MacroAssembler* masm,
1266                     Action action,
1267                     CPURegister::RegisterType type)
1268      : masm_(masm),
1269        action_(action),
1270        type_(type),
1271        expected_(GetGuardListFor(CPURegister::kRegister)),
1272        expected_v_(GetGuardListFor(CPURegister::kVRegister)),
1273        expected_p_(GetGuardListFor(CPURegister::kPRegister)) {
1274    *GetExpectedFor(type) = GetInitialList();
1275    masm->GetScratchRegisterList()->SetList(expected_);
1276    masm->GetScratchVRegisterList()->SetList(expected_v_);
1277    masm->GetScratchPRegisterList()->SetList(expected_p_);
1278  }
1279
1280  // Notify the helper that the registers in `update` have been passed into
1281  // DoAction(), and assert that the MacroAssembler's scratch lists are as
1282  // expected.
1283  void RecordActionsAndCheck(RegList update) {
1284    RegList* expected = GetExpectedFor(type_);
1285    switch (action_) {
1286      case kRelease:
1287        // It isn't valid to release a register that is already available.
1288        VIXL_CHECK((*expected & update) == 0);
1289        VIXL_FALLTHROUGH();
1290      case kInclude:
1291        *expected |= update;
1292        break;
1293      case kExclude:
1294        *expected &= ~update;
1295        break;
1296    }
1297    VIXL_CHECK(masm_->GetScratchRegisterList()->GetList() == expected_);
1298    VIXL_CHECK(masm_->GetScratchVRegisterList()->GetList() == expected_v_);
1299    VIXL_CHECK(masm_->GetScratchPRegisterList()->GetList() == expected_p_);
1300  }
1301
1302 private:
1303  RegList GetInitialList() {
1304    switch (action_) {
1305      case kRelease:
1306      case kInclude:
1307        return 0;
1308      case kExclude:
1309        return GetPotentialListFor(type_);
1310    }
1311    VIXL_UNREACHABLE();
1312    return 0;
1313  }
1314
1315  // Return some valid, non-zero RegList suitable for use as a guard value.
1316  static RegList GetGuardListFor(CPURegister::RegisterType type) {
1317    return (0x1111111111111111 * (type + 1)) & GetPotentialListFor(type);
1318  }
1319
1320  static RegList GetPotentialListFor(CPURegister::RegisterType type) {
1321    RegList list = CPURegList::All(type).GetList();
1322    // The zr and sp registers cannot be scratch registers.
1323    if (type == CPURegister::kRegister) list &= ~(xzr.GetBit() | sp.GetBit());
1324    return list;
1325  }
1326
1327  RegList* GetExpectedFor(CPURegister::RegisterType type) {
1328    switch (type) {
1329      case CPURegister::kNoRegister:
1330        VIXL_UNREACHABLE();
1331        return NULL;
1332      case CPURegister::kRegister:
1333        return &expected_;
1334      case CPURegister::kVRegister:
1335      case CPURegister::kZRegister:
1336        return &expected_v_;
1337      case CPURegister::kPRegister:
1338        return &expected_p_;
1339    }
1340    VIXL_UNREACHABLE();
1341    return NULL;
1342  }
1343
1344  MacroAssembler* masm_;
1345  Action action_;
1346  CPURegister::RegisterType type_;
1347
1348  RegList expected_;
1349  RegList expected_v_;
1350  RegList expected_p_;
1351};
1352
1353TEST(scratch_scope_include) {
1354  MacroAssembler masm;
1355  {
1356    UseScratchRegisterScope temps(&masm);
1357    ScratchScopeHelper helper(&masm,
1358                              ScratchScopeHelper::kInclude,
1359                              CPURegister::kRegister);
1360
1361    // Any suitable register type deriving from CPURegister can be included.
1362    temps.Include(w0);
1363    temps.Include(x1);
1364    temps.Include(WRegister(2));
1365    temps.Include(XRegister(3));
1366    temps.Include(Register(w4));
1367    temps.Include(Register(x5));
1368    temps.Include(CPURegister(w6));
1369    temps.Include(CPURegister(x7));
1370    helper.RecordActionsAndCheck(0xff);
1371    // Multiple registers can be included at once.
1372    temps.Include(x8, w9, x10);
1373    temps.Include(Register(w12), Register(x13), Register(w14));
1374    temps.Include(XRegister(16), XRegister(17), XRegister(18));
1375    temps.Include(WRegister(20), WRegister(21), WRegister(22));
1376    temps.Include(CPURegList(w24, w25, w26));
1377    helper.RecordActionsAndCheck(0x7777700);
1378    // Including a register again has no effect.
1379    temps.Include(Register(w26));
1380    temps.Include(Register(x25));
1381    temps.Include(CPURegister(x24));
1382    temps.Include(CPURegister(x22));
1383    temps.Include(x21, x20, w18, x17);
1384    temps.Include(CPURegList(x16, x14, x13, x12));
1385    helper.RecordActionsAndCheck(0x7777700);
1386  }
1387}
1388
1389TEST(scratch_scope_exclude) {
1390  MacroAssembler masm;
1391  {
1392    UseScratchRegisterScope temps(&masm);
1393    ScratchScopeHelper helper(&masm,
1394                              ScratchScopeHelper::kExclude,
1395                              CPURegister::kRegister);
1396
1397    // Any suitable register type deriving from CPURegister can be excluded.
1398    temps.Exclude(w0);
1399    temps.Exclude(x1);
1400    temps.Exclude(WRegister(2));
1401    temps.Exclude(XRegister(3));
1402    temps.Exclude(Register(w4));
1403    temps.Exclude(Register(x5));
1404    temps.Exclude(CPURegister(w6));
1405    temps.Exclude(CPURegister(x7));
1406    helper.RecordActionsAndCheck(0xff);
1407    // Multiple registers can be excluded at once.
1408    temps.Exclude(x8, w9, x10);
1409    temps.Exclude(Register(w12), Register(x13), Register(w14));
1410    temps.Exclude(XRegister(16), XRegister(17), XRegister(18));
1411    temps.Exclude(WRegister(20), WRegister(21), WRegister(22));
1412    temps.Exclude(CPURegList(w24, w25, w26));
1413    helper.RecordActionsAndCheck(0x7777700);
1414    // Excluding a register again has no effect.
1415    temps.Exclude(Register(w26));
1416    temps.Exclude(Register(x25));
1417    temps.Exclude(CPURegister(x24));
1418    temps.Exclude(CPURegister(x22));
1419    temps.Exclude(x21, x20, w18, x17);
1420    temps.Exclude(CPURegList(x16, x14, x13, x12));
1421    helper.RecordActionsAndCheck(0x7777700);
1422  }
1423}
1424
1425TEST(scratch_scope_release) {
1426  MacroAssembler masm;
1427  {
1428    UseScratchRegisterScope temps(&masm);
1429    ScratchScopeHelper helper(&masm,
1430                              ScratchScopeHelper::kRelease,
1431                              CPURegister::kRegister);
1432
1433    // Any suitable register type deriving from CPURegister can be released.
1434    temps.Release(w0);
1435    temps.Release(x1);
1436    temps.Release(WRegister(2));
1437    temps.Release(XRegister(3));
1438    temps.Release(Register(w4));
1439    temps.Release(Register(x5));
1440    temps.Release(CPURegister(w6));
1441    temps.Release(CPURegister(x7));
1442    helper.RecordActionsAndCheck(0xff);
1443    // It is not possible to release more than one register at a time, and it is
1444    // invalid to release a register that is already available.
1445  }
1446}
1447
1448TEST(scratch_scope_include_v) {
1449  MacroAssembler masm;
1450  {
1451    UseScratchRegisterScope temps(&masm);
1452    ScratchScopeHelper helper(&masm,
1453                              ScratchScopeHelper::kInclude,
1454                              CPURegister::kVRegister);
1455
1456    // Any suitable register type deriving from CPURegister can be included.
1457    temps.Include(b0);
1458    temps.Include(h1);
1459    temps.Include(SRegister(2));
1460    temps.Include(DRegister(3));
1461    temps.Include(VRegister(q4));
1462    temps.Include(VRegister(v5.V8B()));
1463    temps.Include(CPURegister(d6));
1464    temps.Include(CPURegister(v7.S4B()));
1465    helper.RecordActionsAndCheck(0xff);
1466    // Multiple registers can be included at once.
1467    temps.Include(b8, h9, s10);
1468    temps.Include(VRegister(d12), VRegister(d13), VRegister(d14));
1469    temps.Include(QRegister(16), QRegister(17), QRegister(18));
1470    temps.Include(BRegister(20), BRegister(21), BRegister(22));
1471    temps.Include(CPURegList(s24, s25, s26));
1472    helper.RecordActionsAndCheck(0x7777700);
1473    // Including a register again has no effect.
1474    temps.Include(VRegister(b26));
1475    temps.Include(VRegister(h25));
1476    temps.Include(CPURegister(s24));
1477    temps.Include(CPURegister(v22.V4H()));
1478    temps.Include(q21, d20, s18, h17);
1479    temps.Include(CPURegList(h16, h14, h13, h12));
1480    helper.RecordActionsAndCheck(0x7777700);
1481  }
1482}
1483
1484TEST(scratch_scope_exclude_v) {
1485  MacroAssembler masm;
1486  {
1487    UseScratchRegisterScope temps(&masm);
1488    ScratchScopeHelper helper(&masm,
1489                              ScratchScopeHelper::kExclude,
1490                              CPURegister::kVRegister);
1491
1492    // Any suitable register type deriving from CPURegister can be excluded.
1493    temps.Exclude(b0);
1494    temps.Exclude(h1);
1495    temps.Exclude(SRegister(2));
1496    temps.Exclude(DRegister(3));
1497    temps.Exclude(VRegister(q4));
1498    temps.Exclude(VRegister(v5.V8B()));
1499    temps.Exclude(CPURegister(d6));
1500    temps.Exclude(CPURegister(v7.S4B()));
1501    helper.RecordActionsAndCheck(0xff);
1502    // Multiple registers can be excluded at once.
1503    temps.Exclude(b8, h9, s10);
1504    temps.Exclude(VRegister(d12), VRegister(d13), VRegister(d14));
1505    temps.Exclude(QRegister(16), QRegister(17), QRegister(18));
1506    temps.Exclude(BRegister(20), BRegister(21), BRegister(22));
1507    temps.Exclude(CPURegList(s24, s25, s26));
1508    helper.RecordActionsAndCheck(0x7777700);
1509    // Excluding a register again has no effect.
1510    temps.Exclude(VRegister(b26));
1511    temps.Exclude(VRegister(h25));
1512    temps.Exclude(CPURegister(s24));
1513    temps.Exclude(CPURegister(v22.V4H()));
1514    temps.Exclude(q21, d20, s18, h17);
1515    temps.Exclude(CPURegList(h16, h14, h13, h12));
1516    helper.RecordActionsAndCheck(0x7777700);
1517  }
1518}
1519
1520TEST(scratch_scope_release_v) {
1521  MacroAssembler masm;
1522  {
1523    UseScratchRegisterScope temps(&masm);
1524    ScratchScopeHelper helper(&masm,
1525                              ScratchScopeHelper::kRelease,
1526                              CPURegister::kVRegister);
1527
1528    // Any suitable register type deriving from CPURegister can be released.
1529    temps.Release(b0);
1530    temps.Release(h1);
1531    temps.Release(SRegister(2));
1532    temps.Release(DRegister(3));
1533    temps.Release(VRegister(q4));
1534    temps.Release(VRegister(v5.V8B()));
1535    temps.Release(CPURegister(d6));
1536    temps.Release(CPURegister(v7.S4B()));
1537    helper.RecordActionsAndCheck(0xff);
1538    // It is not possible to release more than one register at a time, and it is
1539    // invalid to release a register that is already available.
1540  }
1541}
1542
1543TEST(scratch_scope_include_z) {
1544  MacroAssembler masm;
1545  {
1546    UseScratchRegisterScope temps(&masm);
1547    ScratchScopeHelper helper(&masm,
1548                              ScratchScopeHelper::kInclude,
1549                              CPURegister::kZRegister);
1550
1551    // Any suitable register type deriving from CPURegister can be included.
1552    temps.Include(z0);
1553    temps.Include(z1.VnB());
1554    temps.Include(ZRegister(2));
1555    temps.Include(ZRegister(3, kFormatVnD));
1556    temps.Include(CPURegister(z4));
1557    temps.Include(CPURegister(z5.VnH()));
1558    helper.RecordActionsAndCheck(0x3f);
1559    // Multiple registers can be included at once.
1560    temps.Include(z8, z9, z10.VnS());
1561    temps.Include(ZRegister(12), ZRegister(13, kHRegSize), z14);
1562    temps.Include(CPURegList(z16, z17, z18));
1563    helper.RecordActionsAndCheck(0x77700);
1564    // Including a register again has no effect.
1565    temps.Include(ZRegister(18));
1566    temps.Include(ZRegister(17, kFormatVnB));
1567    temps.Include(CPURegister(z16));
1568    temps.Include(CPURegister(z13.VnD()));
1569    temps.Include(z12, z10, z9.VnB(), z8);
1570    temps.Include(CPURegList(z5, z4, z3, z2));
1571    helper.RecordActionsAndCheck(0x77700);
1572  }
1573}
1574
1575TEST(scratch_scope_exclude_z) {
1576  MacroAssembler masm;
1577  {
1578    UseScratchRegisterScope temps(&masm);
1579    ScratchScopeHelper helper(&masm,
1580                              ScratchScopeHelper::kExclude,
1581                              CPURegister::kZRegister);
1582
1583    // Any suitable register type deriving from CPURegister can be excluded.
1584    temps.Exclude(z0);
1585    temps.Exclude(z1.VnB());
1586    temps.Exclude(ZRegister(2));
1587    temps.Exclude(ZRegister(3, kFormatVnD));
1588    temps.Exclude(CPURegister(z4));
1589    temps.Exclude(CPURegister(z5.VnH()));
1590    helper.RecordActionsAndCheck(0x3f);
1591    // Multiple registers can be excluded at once.
1592    temps.Exclude(z8, z9, z10.VnS());
1593    temps.Exclude(ZRegister(12), ZRegister(13, kHRegSize), z14);
1594    temps.Exclude(CPURegList(z16, z17, z18));
1595    helper.RecordActionsAndCheck(0x77700);
1596    // Excluding a register again has no effect.
1597    temps.Exclude(ZRegister(18));
1598    temps.Exclude(ZRegister(17, kFormatVnB));
1599    temps.Exclude(CPURegister(z16));
1600    temps.Exclude(CPURegister(z13.VnD()));
1601    temps.Exclude(z12, z10, z9.VnB(), z8);
1602    temps.Exclude(CPURegList(z5, z4, z3, z2));
1603    helper.RecordActionsAndCheck(0x77700);
1604  }
1605}
1606
1607TEST(scratch_scope_release_z) {
1608  MacroAssembler masm;
1609  {
1610    UseScratchRegisterScope temps(&masm);
1611    ScratchScopeHelper helper(&masm,
1612                              ScratchScopeHelper::kRelease,
1613                              CPURegister::kZRegister);
1614
1615    // Any suitable register type deriving from CPURegister can be released.
1616    temps.Release(z0);
1617    temps.Release(z1.VnB());
1618    temps.Release(ZRegister(2));
1619    temps.Release(ZRegister(3, kFormatVnD));
1620    temps.Release(CPURegister(z4));
1621    temps.Release(CPURegister(z5.VnH()));
1622    helper.RecordActionsAndCheck(0x3f);
1623    // It is not possible to release more than one register at a time, and it is
1624    // invalid to release a register that is already available.
1625  }
1626}
1627
1628TEST(scratch_scope_include_p) {
1629  MacroAssembler masm;
1630  {
1631    UseScratchRegisterScope temps(&masm);
1632    ScratchScopeHelper helper(&masm,
1633                              ScratchScopeHelper::kInclude,
1634                              CPURegister::kPRegister);
1635
1636    // Any suitable register type deriving from CPURegister can be included.
1637    temps.Include(p0);
1638    temps.Include(PRegister(1));
1639    temps.Include(PRegisterWithLaneSize(2, kFormatVnD));
1640    temps.Include(PRegisterM(3));
1641    temps.Include(CPURegister(PRegister(4)));
1642    temps.Include(CPURegister(PRegisterZ(5)));
1643    helper.RecordActionsAndCheck(0x3f);
1644    // Multiple registers can be included at once.
1645    temps.Include(p7, p8.Merging(), p9.VnS());
1646    temps.Include(PRegister(11), PRegisterWithLaneSize(12, kHRegSize));
1647    temps.Include(CPURegList(p15));
1648    helper.RecordActionsAndCheck(0x9b80);
1649    // Including a register again has no effect.
1650    temps.Include(PRegister(15));
1651    temps.Include(PRegisterWithLaneSize(12, kFormatVnB));
1652    temps.Include(CPURegister(p11));
1653    temps.Include(CPURegister(p9.VnD()));
1654    temps.Include(p8.Merging(), p7.Zeroing(), p5.VnB(), p4);
1655    temps.Include(CPURegList(p3, p2, p1, p0));
1656    helper.RecordActionsAndCheck(0x9b80);
1657  }
1658}
1659
1660TEST(scratch_scope_exclude_p) {
1661  MacroAssembler masm;
1662  {
1663    UseScratchRegisterScope temps(&masm);
1664    ScratchScopeHelper helper(&masm,
1665                              ScratchScopeHelper::kExclude,
1666                              CPURegister::kPRegister);
1667
1668    // Any suitable register type deriving from CPURegister can be excluded.
1669    temps.Exclude(p0);
1670    temps.Exclude(PRegister(1));
1671    temps.Exclude(PRegisterWithLaneSize(2, kFormatVnD));
1672    temps.Exclude(PRegisterM(3));
1673    temps.Exclude(CPURegister(PRegister(4)));
1674    temps.Exclude(CPURegister(PRegisterZ(5)));
1675    helper.RecordActionsAndCheck(0x3f);
1676    // Multiple registers can be excluded at once.
1677    temps.Exclude(p7, p8.Merging(), p9.VnS());
1678    temps.Exclude(PRegister(11), PRegisterWithLaneSize(12, kHRegSize));
1679    temps.Exclude(CPURegList(p15));
1680    helper.RecordActionsAndCheck(0x9b80);
1681    // Excluding a register again has no effect.
1682    temps.Exclude(PRegister(15));
1683    temps.Exclude(PRegisterWithLaneSize(12, kFormatVnB));
1684    temps.Exclude(CPURegister(p11));
1685    temps.Exclude(CPURegister(p9.VnD()));
1686    temps.Exclude(p8.Merging(), p7.Zeroing(), p5.VnB(), p4);
1687    temps.Exclude(CPURegList(p3, p2, p1, p0));
1688    helper.RecordActionsAndCheck(0x9b80);
1689  }
1690}
1691
TEST(scratch_scope_release_p) {
  MacroAssembler masm;
  {
    UseScratchRegisterScope temps(&masm);
    ScratchScopeHelper helper(&masm,
                              ScratchScopeHelper::kRelease,
                              CPURegister::kPRegister);

    // Any suitable register type deriving from CPURegister can be released.
    temps.Release(p0);
    temps.Release(PRegister(1));
    temps.Release(PRegisterWithLaneSize(2, kFormatVnD));
    temps.Release(PRegisterM(3));
    temps.Release(CPURegister(PRegister(4)));
    temps.Release(CPURegister(PRegisterZ(5)));
    // Check that p0-p5 (mask 0x3f) were each released exactly once.
    helper.RecordActionsAndCheck(0x3f);
    // It is not possible to release more than one register at a time, and it is
    // invalid to release a register that is already available.
  }
}
1712
1713#ifdef VIXL_INCLUDE_SIMULATOR_AARCH64
1714TEST(sim_stack_default) {
1715  SimStack::Allocated s = SimStack().Allocate();
1716
1717  // The default stack is at least 16-byte aligned.
1718  VIXL_CHECK(IsAligned<16>(s.GetBase()));
1719  VIXL_CHECK(IsAligned<16>(s.GetLimit() + 1));
1720
1721  VIXL_CHECK(s.GetBase() > s.GetLimit());
1722
1723  // The default guard regions are sufficient to detect at least off-by-one
1724  // errors.
1725  VIXL_CHECK(s.IsAccessInGuardRegion(s.GetBase(), 1));
1726  VIXL_CHECK(!s.IsAccessInGuardRegion(s.GetBase() - 1, 1));
1727  // The limit is one below the lowest address on the stack.
1728  VIXL_CHECK(s.IsAccessInGuardRegion(s.GetLimit(), 1));
1729  VIXL_CHECK(!s.IsAccessInGuardRegion(s.GetLimit() + 1, 1));
1730
1731  // We need to be able to access 16-byte granules at both extremes.
1732  VIXL_CHECK(!s.IsAccessInGuardRegion(s.GetBase() - 16, 16));
1733  VIXL_CHECK(!s.IsAccessInGuardRegion(s.GetLimit() + 1, 16));
1734}
1735
1736TEST(sim_stack) {
1737  SimStack builder;
1738  builder.AlignToBytesLog2(WhichPowerOf2(1024));
1739  builder.SetBaseGuardSize(42);
1740  builder.SetLimitGuardSize(2049);
1741  builder.SetUsableSize(2048);
1742  SimStack::Allocated s = builder.Allocate();
1743
1744  VIXL_CHECK(IsAligned<1024>(s.GetBase()));
1745  VIXL_CHECK(IsAligned<1024>(s.GetLimit() + 1));
1746
1747  // The stack is accessible for (limit, base), both exclusive.
1748  // This is checked precisely, using the base and limit modified to respect
1749  // alignment, so we can test the exact boundary condition.
1750  VIXL_CHECK(s.IsAccessInGuardRegion(s.GetBase(), 1));
1751  VIXL_CHECK(!s.IsAccessInGuardRegion(s.GetBase() - 1, 1));
1752  VIXL_CHECK(s.IsAccessInGuardRegion(s.GetLimit(), 1));
1753  VIXL_CHECK(!s.IsAccessInGuardRegion(s.GetLimit() + 1, 1));
1754  VIXL_CHECK((s.GetBase() - s.GetLimit() - 1) == 2048);
1755
1756  // We can access the whole range (limit, base), both exclusive.
1757  VIXL_CHECK(!s.IsAccessInGuardRegion(s.GetLimit() + 1, 2048));
1758  // Off-by-one.
1759  VIXL_CHECK(s.IsAccessInGuardRegion(s.GetLimit(), 2048));
1760  VIXL_CHECK(s.IsAccessInGuardRegion(s.GetLimit() + 1, 2049));
1761  // Accesses spanning whole guard regions.
1762  VIXL_CHECK(s.IsAccessInGuardRegion(s.GetBase() - 42, 4096));
1763  VIXL_CHECK(s.IsAccessInGuardRegion(s.GetLimit() - 1280, 2048));
1764  VIXL_CHECK(s.IsAccessInGuardRegion(s.GetLimit() - 1280, 10000));
1765}
1766#endif
1767
1768}  // namespace aarch64
1769}  // namespace vixl
1770