// Copyright 2019, VIXL authors
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
//   * Redistributions of source code must retain the above copyright notice,
//     this list of conditions and the following disclaimer.
//   * Redistributions in binary form must reproduce the above copyright notice,
//     this list of conditions and the following disclaimer in the documentation
//     and/or other materials provided with the distribution.
//   * Neither the name of ARM Limited nor the names of its contributors may be
//     used to endorse or promote products derived from this software without
//     specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.

// Test infrastructure.
//
// Tests are functions which accept no parameters and have no return values.
// The testing code should not perform an explicit return once completed.
// For example, to test the mov immediate instruction a very simple test would
// be:
//
//   TEST(mov_x0_one) {
//     SETUP();
//
//     START();
//     __ mov(x0, 1);
//     END();
//
//     if (CAN_RUN()) {
//       RUN();
//
//       ASSERT_EQUAL_64(1, x0);
//     }
//   }
//
// Within a START ... END block all registers but sp can be modified. sp has to
// be explicitly saved/restored. The END() macro replaces the function return
// so it may appear multiple times in a test if the test has multiple exit
// points.
//
// Tests requiring specific CPU features should specify exactly what they
// require using SETUP_WITH_FEATURES(...) instead of SETUP().
//
// Once the test has been run all integer and floating point registers as well
// as flags are accessible through a RegisterDump instance, see
// utils-aarch64.cc for more info on RegisterDump.
//
// We provide some helper asserts to handle common cases:
//
//   ASSERT_EQUAL_32(int32_t, int32_t)
//   ASSERT_EQUAL_FP32(float, float)
//   ASSERT_EQUAL_32(int32_t, W register)
//   ASSERT_EQUAL_FP32(float, S register)
//   ASSERT_EQUAL_64(int64_t, int64_t)
//   ASSERT_EQUAL_FP64(double, double)
//   ASSERT_EQUAL_64(int64_t, X register)
//   ASSERT_EQUAL_64(X register, X register)
//   ASSERT_EQUAL_FP64(double, D register)
//
// e.g. ASSERT_EQUAL_64(0.5, d30);
//
// If more advanced computation is required before the assert then access the
// RegisterDump named core directly:
//
//   ASSERT_EQUAL_64(0x1234, core->reg_x0() & 0xffff);

namespace vixl {
namespace aarch64 {

// Shorthand used in test bodies: `__ mov(x0, 1)` expands to `masm.mov(x0, 1)`.
#define __ masm.
// Register a test with the harness, prefixing the name to avoid clashes with
// tests for other architectures.
#define TEST(name) TEST_(AARCH64_ASM_##name)

#ifdef VIXL_INCLUDE_SIMULATOR_AARCH64
// Run tests with the simulator.

// Declare a MacroAssembler and a Simulator with no CPU features enabled.
#define SETUP()          \
  MacroAssembler masm;   \
  SETUP_COMMON();        \
  SETUP_COMMON_SIM()

// As SETUP(), but enable the given CPU features on both the assembler and the
// simulator so the test can use (and the simulator can accept) them.
#define SETUP_WITH_FEATURES(...)                   \
  MacroAssembler masm;                             \
  SETUP_COMMON();                                  \
  SETUP_COMMON_SIM();                              \
  masm.SetCPUFeatures(CPUFeatures(__VA_ARGS__));   \
  simulator.SetCPUFeatures(CPUFeatures(__VA_ARGS__))

// As SETUP(), but with a code buffer of at least `size` extra bytes.
#define SETUP_CUSTOM(size, pic)                                    \
  MacroAssembler masm(size + CodeBuffer::kDefaultCapacity, pic);   \
  SETUP_COMMON();                                                  \
  SETUP_COMMON_SIM()

// As SETUP(), but forward extra arguments to the Simulator constructor.
#define SETUP_CUSTOM_SIM(...)                                      \
  MacroAssembler masm;                                             \
  SETUP_COMMON();                                                  \
  Simulator simulator(&simulator_decoder, stdout, __VA_ARGS__);    \
  simulator.SetColouredTrace(Test::coloured_trace());              \
  simulator.SetCPUFeatures(CPUFeatures::None())

// State shared by every simulator-build SETUP variant. The declared locals
// (core, offset_*) are referenced later by START(), END() and DISASSEMBLE().
#define SETUP_COMMON()                                                    \
  bool queried_can_run = false;                                           \
  bool printed_sve_lane_warning = false;                                  \
  /* Avoid unused-variable warnings in case a test never calls RUN(). */  \
  USE(queried_can_run);                                                   \
  USE(printed_sve_lane_warning);                                          \
  masm.SetCPUFeatures(CPUFeatures::None());                               \
  masm.SetGenerateSimulatorCode(true);                                    \
  Decoder simulator_decoder;                                              \
  RegisterDump core;                                                      \
  ptrdiff_t offset_after_infrastructure_start;                            \
  ptrdiff_t offset_before_infrastructure_end

// Construct the Simulator itself; shared by the SETUP variants above.
#define SETUP_COMMON_SIM()                              \
  Simulator simulator(&simulator_decoder);              \
  simulator.SetColouredTrace(Test::coloured_trace());   \
  simulator.SetCPUFeatures(CPUFeatures::None())

// Emit the test prologue (callee-saved register spill, optional tracing) and
// record where the test's own code starts.
#define START()                                                               \
  masm.Reset();                                                               \
  simulator.ResetState();                                                     \
  {                                                                           \
    SimulationCPUFeaturesScope cpu(&masm, kInfrastructureCPUFeatures);        \
    __ PushCalleeSavedRegisters();                                            \
  }                                                                           \
  /* The infrastructure code hasn't been covered at the moment, e.g. */       \
  /* prologue/epilogue. Suppress tagging mis-match exception before */        \
  /* this point. */                                                           \
  if (masm.GetCPUFeatures()->Has(CPUFeatures::kMTE)) {                        \
    __ Hlt(DebugHltOpcode::kMTEActive);                                       \
  }                                                                           \
  {                                                                           \
    int trace_parameters = 0;                                                 \
    if (Test::trace_reg()) trace_parameters |= LOG_STATE;                     \
    if (Test::trace_write()) trace_parameters |= LOG_WRITE;                   \
    if (Test::trace_sim()) trace_parameters |= LOG_DISASM;                    \
    if (Test::trace_branch()) trace_parameters |= LOG_BRANCH;                 \
    if (trace_parameters != 0) {                                              \
      __ Trace(static_cast<TraceParameters>(trace_parameters), TRACE_ENABLE); \
    }                                                                         \
  }                                                                           \
  offset_after_infrastructure_start = masm.GetCursorOffset();                 \
  /* Avoid unused-variable warnings in case a test never calls RUN(). */      \
  USE(offset_after_infrastructure_start)

// Emit the test epilogue: dump registers into `core`, restore callee-saved
// registers, return, and finalize the generated code.
#define END()                                                              \
  offset_before_infrastructure_end = masm.GetCursorOffset();               \
  /* Avoid unused-variable warnings in case a test never calls RUN(). */   \
  USE(offset_before_infrastructure_end);                                   \
  __ Trace(LOG_ALL, TRACE_DISABLE);                                        \
  if (masm.GetCPUFeatures()->Has(CPUFeatures::kMTE)) {                     \
    __ Hlt(DebugHltOpcode::kMTEInactive);                                  \
  }                                                                        \
  {                                                                        \
    SimulationCPUFeaturesScope cpu(&masm, kInfrastructureCPUFeatures);     \
    core.Dump(&masm);                                                      \
    __ PopCalleeSavedRegisters();                                          \
  }                                                                        \
  __ Ret();                                                                \
  masm.FinalizeCode()

// Run the generated code on the simulator, then check that the test actually
// exercised every CPU feature it requested.
#define RUN()                                                                  \
  RUN_WITHOUT_SEEN_FEATURE_CHECK();                                            \
  {                                                                            \
    /* We expect the test to use all of the features it requested, plus the */ \
    /* features that the instruction code requires. */                         \
    CPUFeatures const& expected_features =                                     \
        simulator.GetCPUFeatures()->With(CPUFeatures::kNEON);                  \
    CPUFeatures const& seen = simulator.GetSeenFeatures();                     \
    /* This gives three broad categories of features that we care about: */    \
    /*   1. Things both expected and seen. */                                  \
    /*   2. Things seen, but not expected. The simulator catches these. */     \
    /*   3. Things expected, but not seen. We check these here. */             \
    /* In a valid, passing test, categories 2 and 3 should be empty. */        \
    if (seen != expected_features) {                                           \
      /* The Simulator should have caught anything in category 2 already. */   \
      VIXL_ASSERT(expected_features.Has(seen));                                \
      /* Anything left is category 3: things expected, but not seen. This */   \
      /* is not necessarily a bug in VIXL itself, but indicates that the */    \
      /* test is less strict than it could be. */                              \
      CPUFeatures missing = expected_features.Without(seen);                   \
      VIXL_ASSERT(missing.Count() > 0);                                        \
      std::cout << "Error: expected to see CPUFeatures { " << missing          \
                << " }\n";                                                     \
      VIXL_ABORT();                                                            \
    }                                                                          \
  }

// As RUN(), but skip the seen-feature strictness check above.
#define RUN_WITHOUT_SEEN_FEATURE_CHECK()   \
  DISASSEMBLE();                           \
  VIXL_ASSERT(QUERIED_CAN_RUN());          \
  VIXL_ASSERT(CAN_RUN());                  \
  simulator.RunFrom(masm.GetBuffer()->GetStartAddress<Instruction*>())

#else  // ifdef VIXL_INCLUDE_SIMULATOR_AARCH64.

// Native-execution variants of the macros above: no Simulator is constructed
// and the generated code runs directly on the host CPU.

#define SETUP()          \
  MacroAssembler masm;   \
  SETUP_COMMON()

#define SETUP_WITH_FEATURES(...)                 \
  MacroAssembler masm;                           \
  SETUP_COMMON();                                \
  masm.SetCPUFeatures(CPUFeatures(__VA_ARGS__))

#define SETUP_CUSTOM(size, pic)                               \
  size_t buffer_size = size + CodeBuffer::kDefaultCapacity;   \
  MacroAssembler masm(buffer_size, pic);                      \
  SETUP_COMMON()

#define SETUP_COMMON()                                                    \
  bool queried_can_run = false;                                           \
  bool printed_sve_lane_warning = false;                                  \
  /* Avoid unused-variable warnings in case a test never calls RUN(). */  \
  USE(queried_can_run);                                                   \
  USE(printed_sve_lane_warning);                                          \
  masm.SetCPUFeatures(CPUFeatures::None());                               \
  masm.SetGenerateSimulatorCode(false);                                   \
  RegisterDump core;                                                      \
  CPU::SetUp();                                                           \
  ptrdiff_t offset_after_infrastructure_start;                            \
  ptrdiff_t offset_before_infrastructure_end

#define START()                                                           \
  masm.Reset();                                                           \
  {                                                                       \
    CPUFeaturesScope cpu(&masm, kInfrastructureCPUFeatures);              \
    __ PushCalleeSavedRegisters();                                        \
  }                                                                       \
  offset_after_infrastructure_start = masm.GetCursorOffset();             \
  /* Avoid unused-variable warnings in case a test never calls RUN(). */  \
  USE(offset_after_infrastructure_start)

#define END()                                                             \
  offset_before_infrastructure_end = masm.GetCursorOffset();              \
  /* Avoid unused-variable warnings in case a test never calls RUN(). */  \
  USE(offset_before_infrastructure_end);                                  \
  {                                                                       \
    CPUFeaturesScope cpu(&masm, kInfrastructureCPUFeatures);              \
    core.Dump(&masm);                                                     \
    __ PopCalleeSavedRegisters();                                         \
  }                                                                       \
  __ Ret();                                                               \
  masm.FinalizeCode()

// Execute the generated code from the memory area.
#define RUN()                                                     \
  DISASSEMBLE();                                                  \
  VIXL_ASSERT(QUERIED_CAN_RUN());                                 \
  VIXL_ASSERT(CAN_RUN());                                         \
  masm.GetBuffer()->SetExecutable();                              \
  ExecuteMemory(masm.GetBuffer()->GetStartAddress<byte*>(),       \
                masm.GetSizeOfCodeGenerated());                   \
  masm.GetBuffer()->SetWritable()

// This just provides compatibility with VIXL_INCLUDE_SIMULATOR_AARCH64 builds.
// We cannot run seen-feature checks when running natively.
#define RUN_WITHOUT_SEEN_FEATURE_CHECK() RUN()

#endif  // ifdef VIXL_INCLUDE_SIMULATOR_AARCH64.
// Query whether the current configuration can actually run the test; sets
// `queried_can_run` so RUN() can assert the test checked before running.
#define CAN_RUN() CanRun(*masm.GetCPUFeatures(), &queried_can_run)
#define QUERIED_CAN_RUN() (queried_can_run)

// Print a disassembly of the generated code if requested on the command line.
// The infrastructure prologue/epilogue is included only with --disassemble.
#define DISASSEMBLE()                                                     \
  if (Test::disassemble()) {                                              \
    PrintDisassembler disasm(stdout);                                     \
    CodeBuffer* buffer = masm.GetBuffer();                                \
    Instruction* test_start = buffer->GetOffsetAddress<Instruction*>(     \
        offset_after_infrastructure_start);                               \
    Instruction* test_end = buffer->GetOffsetAddress<Instruction*>(       \
        offset_before_infrastructure_end);                                \
                                                                          \
    if (Test::disassemble_infrastructure()) {                             \
      Instruction* infra_start = buffer->GetStartAddress<Instruction*>(); \
      printf("# Infrastructure code (prologue)\n");                       \
      disasm.DisassembleBuffer(infra_start, test_start);                  \
      printf("# Test code\n");                                            \
    } else {                                                              \
      printf(                                                             \
          "# Warning: Omitting infrastructure code. "                     \
          "Use --disassemble to see it.\n");                              \
    }                                                                     \
                                                                          \
    disasm.DisassembleBuffer(test_start, test_end);                       \
                                                                          \
    if (Test::disassemble_infrastructure()) {                             \
      printf("# Infrastructure code (epilogue)\n");                       \
      Instruction* infra_end = buffer->GetEndAddress<Instruction*>();     \
      disasm.DisassembleBuffer(test_end, infra_end);                      \
    }                                                                     \
  }

// The ASSERT_EQUAL_* helpers below compare an expected value against the
// state captured in the RegisterDump `core` by END().

#define ASSERT_EQUAL_NZCV(expected) \
  VIXL_CHECK(EqualNzcv(expected, core.flags_nzcv()))

#define ASSERT_EQUAL_REGISTERS(expected) \
  VIXL_CHECK(EqualRegisters(&expected, &core))

#define ASSERT_EQUAL_FP16(expected, result) \
  VIXL_CHECK(EqualFP16(expected, &core, result))

#define ASSERT_EQUAL_32(expected, result) \
  VIXL_CHECK(Equal32(static_cast<uint32_t>(expected), &core, result))

#define ASSERT_EQUAL_FP32(expected, result) \
  VIXL_CHECK(EqualFP32(expected, &core, result))

#define ASSERT_EQUAL_64(expected, result) \
  VIXL_CHECK(Equal64(expected, &core, result))

#define ASSERT_NOT_EQUAL_64(expected, result) \
  VIXL_CHECK(NotEqual64(expected, &core, result))

#define ASSERT_EQUAL_FP64(expected, result) \
  VIXL_CHECK(EqualFP64(expected, &core, result))

#define ASSERT_EQUAL_128(expected_h, expected_l, result) \
  VIXL_CHECK(Equal128(expected_h, expected_l, &core, result))

#define ASSERT_LITERAL_POOL_SIZE(expected) \
  VIXL_CHECK((expected + kInstructionSize) == (masm.GetLiteralPoolSize()))

// NOTE(review): the trailing ';' below means uses of this macro followed by a
// ';' expand to a double semicolon — harmless here, but inconsistent with the
// other ASSERT_* macros. Confirm before removing.
#define ASSERT_EQUAL_SVE_LANE(expected, result, lane) \
  VIXL_CHECK(EqualSVELane(expected, &core, result, lane));

// If `expected` is scalar, check that every lane of `result` matches it.
// If `expected` is an array of N expected values, check that the first N
// lanes on `result` match. The rightmost (highest-indexed) array element maps
// to the lowest-numbered lane.
#define ASSERT_EQUAL_SVE(expected, result) \
  VIXL_CHECK(EqualSVE(expected, &core, result, &printed_sve_lane_warning))

#define ASSERT_EQUAL_MEMORY(expected, result, ...)           \
  VIXL_CHECK(EqualMemory(reinterpret_cast<void*>(expected),  \
                         reinterpret_cast<void*>(result),    \
                         __VA_ARGS__))

// Check that executing `code` aborts with a std::runtime_error whose message
// starts with `message` (only a prefix of the message is compared).
#define MUST_FAIL_WITH_MESSAGE(code, message)                   \
  {                                                             \
    bool aborted = false;                                       \
    try {                                                       \
      code;                                                     \
    } catch (const std::runtime_error& e) {                     \
      const char* expected_error = message;                     \
      size_t error_length = strlen(expected_error);             \
      if (strncmp(expected_error, e.what(), error_length) == 0) { \
        aborted = true;                                         \
      } else {                                                  \
        printf("Mismatch in error message.\n");                 \
        printf("Expected: %s\n", expected_error);               \
        printf("Found: %s\n", e.what());                        \
      }                                                         \
    }                                                           \
    VIXL_CHECK(aborted);                                        \
  }

}  // namespace aarch64
}  // namespace vixl