// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if !V8_ENABLE_WEBASSEMBLY
#error This header should only be included if WebAssembly is enabled.
#endif  // !V8_ENABLE_WEBASSEMBLY

#ifndef V8_WASM_FUNCTION_BODY_DECODER_IMPL_H_
#define V8_WASM_FUNCTION_BODY_DECODER_IMPL_H_

// Only include this header when implementing a new interface of the
// WasmFullDecoder.

#include <inttypes.h>

#include "src/base/platform/elapsed-timer.h"
#include "src/base/platform/wrappers.h"
#include "src/base/small-vector.h"
#include "src/base/strings.h"
#include "src/utils/bit-vector.h"
#include "src/wasm/decoder.h"
#include "src/wasm/function-body-decoder.h"
#include "src/wasm/value-type.h"
#include "src/wasm/wasm-features.h"
#include "src/wasm/wasm-limits.h"
#include "src/wasm/wasm-module.h"
#include "src/wasm/wasm-opcodes.h"
#include "src/wasm/wasm-subtyping.h"

namespace v8 {
namespace internal {
namespace wasm {

struct WasmGlobal;
struct WasmTag;

#define TRACE(...)                                    \
  do {                                                \
    if (FLAG_trace_wasm_decoder) PrintF(__VA_ARGS__); \
  } while (false)

#define TRACE_INST_FORMAT "  @%-8d #%-30s|"

// If validate==true, return the evaluation of {condition}; otherwise DCHECK
// that it holds and always return true.
#define VALIDATE(condition)                \
  (validate ? V8_LIKELY(condition) : [&] { \
    DCHECK(condition);                     \
    return true;                           \
  }())
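
// Example use, mirroring the Validate() helpers further down in this file:
//   if (!VALIDATE(imm.index < module_->tags.size())) {
//     DecodeError(pc, "Invalid tag index: %u", imm.index);
//     return false;
//   }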

#define CHECK_PROTOTYPE_OPCODE(feat)                                         \
  DCHECK(this->module_->origin == kWasmOrigin);                              \
  if (!VALIDATE(this->enabled_.has_##feat())) {                              \
    this->DecodeError(                                                       \
        "Invalid opcode 0x%02x (enable with --experimental-wasm-" #feat ")", \
        opcode);                                                             \
    return 0;                                                                \
  }                                                                          \
  this->detected_->Add(kFeature_##feat);

#define ATOMIC_OP_LIST(V)                \
  V(AtomicNotify, Uint32)                \
  V(I32AtomicWait, Uint32)               \
  V(I64AtomicWait, Uint64)               \
  V(I32AtomicLoad, Uint32)               \
  V(I64AtomicLoad, Uint64)               \
  V(I32AtomicLoad8U, Uint8)              \
  V(I32AtomicLoad16U, Uint16)            \
  V(I64AtomicLoad8U, Uint8)              \
  V(I64AtomicLoad16U, Uint16)            \
  V(I64AtomicLoad32U, Uint32)            \
  V(I32AtomicAdd, Uint32)                \
  V(I32AtomicAdd8U, Uint8)               \
  V(I32AtomicAdd16U, Uint16)             \
  V(I64AtomicAdd, Uint64)                \
  V(I64AtomicAdd8U, Uint8)               \
  V(I64AtomicAdd16U, Uint16)             \
  V(I64AtomicAdd32U, Uint32)             \
  V(I32AtomicSub, Uint32)                \
  V(I64AtomicSub, Uint64)                \
  V(I32AtomicSub8U, Uint8)               \
  V(I32AtomicSub16U, Uint16)             \
  V(I64AtomicSub8U, Uint8)               \
  V(I64AtomicSub16U, Uint16)             \
  V(I64AtomicSub32U, Uint32)             \
  V(I32AtomicAnd, Uint32)                \
  V(I64AtomicAnd, Uint64)                \
  V(I32AtomicAnd8U, Uint8)               \
  V(I32AtomicAnd16U, Uint16)             \
  V(I64AtomicAnd8U, Uint8)               \
  V(I64AtomicAnd16U, Uint16)             \
  V(I64AtomicAnd32U, Uint32)             \
  V(I32AtomicOr, Uint32)                 \
  V(I64AtomicOr, Uint64)                 \
  V(I32AtomicOr8U, Uint8)                \
  V(I32AtomicOr16U, Uint16)              \
  V(I64AtomicOr8U, Uint8)                \
  V(I64AtomicOr16U, Uint16)              \
  V(I64AtomicOr32U, Uint32)              \
  V(I32AtomicXor, Uint32)                \
  V(I64AtomicXor, Uint64)                \
  V(I32AtomicXor8U, Uint8)               \
  V(I32AtomicXor16U, Uint16)             \
  V(I64AtomicXor8U, Uint8)               \
  V(I64AtomicXor16U, Uint16)             \
  V(I64AtomicXor32U, Uint32)             \
  V(I32AtomicExchange, Uint32)           \
  V(I64AtomicExchange, Uint64)           \
  V(I32AtomicExchange8U, Uint8)          \
  V(I32AtomicExchange16U, Uint16)        \
  V(I64AtomicExchange8U, Uint8)          \
  V(I64AtomicExchange16U, Uint16)        \
  V(I64AtomicExchange32U, Uint32)        \
  V(I32AtomicCompareExchange, Uint32)    \
  V(I64AtomicCompareExchange, Uint64)    \
  V(I32AtomicCompareExchange8U, Uint8)   \
  V(I32AtomicCompareExchange16U, Uint16) \
  V(I64AtomicCompareExchange8U, Uint8)   \
  V(I64AtomicCompareExchange16U, Uint16) \
  V(I64AtomicCompareExchange32U, Uint32)

#define ATOMIC_STORE_OP_LIST(V) \
  V(I32AtomicStore, Uint32)     \
  V(I64AtomicStore, Uint64)     \
  V(I32AtomicStore8U, Uint8)    \
  V(I32AtomicStore16U, Uint16)  \
  V(I64AtomicStore8U, Uint8)    \
  V(I64AtomicStore16U, Uint16)  \
  V(I64AtomicStore32U, Uint32)
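
// The two lists above follow the X-macro pattern: a caller supplies a macro
// V(Name, Type) and expands the list with it. As a purely illustrative sketch
// (the CASE macro name is made up):
//   #define CASE(Name, Type) case kExpr##Name:
//   ATOMIC_OP_LIST(CASE)
//   #undef CASE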

// Decoder error with explicit PC and format arguments.
template <Decoder::ValidateFlag validate, typename... Args>
void DecodeError(Decoder* decoder, const byte* pc, const char* str,
                 Args&&... args) {
  CHECK(validate == Decoder::kFullValidation ||
        validate == Decoder::kBooleanValidation);
  STATIC_ASSERT(sizeof...(Args) > 0);
  if (validate == Decoder::kBooleanValidation) {
    decoder->MarkError();
  } else {
    decoder->errorf(pc, str, std::forward<Args>(args)...);
  }
}

// Decoder error with explicit PC and no format arguments.
template <Decoder::ValidateFlag validate>
void DecodeError(Decoder* decoder, const byte* pc, const char* str) {
  CHECK(validate == Decoder::kFullValidation ||
        validate == Decoder::kBooleanValidation);
  if (validate == Decoder::kBooleanValidation) {
    decoder->MarkError();
  } else {
    decoder->error(pc, str);
  }
}

// Decoder error without explicit PC, but with format arguments.
template <Decoder::ValidateFlag validate, typename... Args>
void DecodeError(Decoder* decoder, const char* str, Args&&... args) {
  CHECK(validate == Decoder::kFullValidation ||
        validate == Decoder::kBooleanValidation);
  STATIC_ASSERT(sizeof...(Args) > 0);
  if (validate == Decoder::kBooleanValidation) {
    decoder->MarkError();
  } else {
    decoder->errorf(str, std::forward<Args>(args)...);
  }
}

// Decoder error without explicit PC and without format arguments.
template <Decoder::ValidateFlag validate>
void DecodeError(Decoder* decoder, const char* str) {
  CHECK(validate == Decoder::kFullValidation ||
        validate == Decoder::kBooleanValidation);
  if (validate == Decoder::kBooleanValidation) {
    decoder->MarkError();
  } else {
    decoder->error(str);
  }
}
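
// Under kBooleanValidation these helpers only mark the decoder as failed;
// under kFullValidation they also format the error message. Typical call
// sites in this file look like:
//   DecodeError<validate>(decoder, pc, "Unknown heap type %" PRId64,
//                         heap_index);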

namespace value_type_reader {

// If {module} is not null, the read index will be checked against the module's
// type capacity.
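// A heap type is encoded as an i33: a negative value selects one of the
// predefined heap types handled in the switch below, while a non-negative
// value is an index into the module's type section.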
template <Decoder::ValidateFlag validate>
HeapType read_heap_type(Decoder* decoder, const byte* pc,
                        uint32_t* const length, const WasmModule* module,
                        const WasmFeatures& enabled) {
  int64_t heap_index = decoder->read_i33v<validate>(pc, length, "heap type");
  if (heap_index < 0) {
    int64_t min_1_byte_leb128 = -64;
    if (!VALIDATE(heap_index >= min_1_byte_leb128)) {
      DecodeError<validate>(decoder, pc, "Unknown heap type %" PRId64,
                            heap_index);
      return HeapType(HeapType::kBottom);
    }
    uint8_t uint_7_mask = 0x7F;
    uint8_t code = static_cast<ValueTypeCode>(heap_index) & uint_7_mask;
    switch (code) {
      case kEqRefCode:
      case kI31RefCode:
      case kDataRefCode:
      case kArrayRefCode:
      case kAnyRefCodeAlias:
        if (!VALIDATE(enabled.has_gc())) {
          DecodeError<validate>(
              decoder, pc,
              "invalid heap type '%s', enable with --experimental-wasm-gc",
              HeapType::from_code(code).name().c_str());
          return HeapType(HeapType::kBottom);
        }
        V8_FALLTHROUGH;
      case kAnyRefCode:
      case kFuncRefCode:
        return HeapType::from_code(code);
      default:
        DecodeError<validate>(decoder, pc, "Unknown heap type %" PRId64,
                              heap_index);
        return HeapType(HeapType::kBottom);
    }
  } else {
    if (!VALIDATE(enabled.has_typed_funcref())) {
      DecodeError<validate>(decoder, pc,
                            "Invalid indexed heap type, enable with "
                            "--experimental-wasm-typed-funcref");
      return HeapType(HeapType::kBottom);
    }
    uint32_t type_index = static_cast<uint32_t>(heap_index);
    if (!VALIDATE(type_index < kV8MaxWasmTypes)) {
      DecodeError<validate>(
          decoder, pc,
          "Type index %u is greater than the maximum number %zu "
          "of type definitions supported by V8",
          type_index, kV8MaxWasmTypes);
      return HeapType(HeapType::kBottom);
    }
    // We use capacity over size so this works mid-DecodeTypeSection.
    if (!VALIDATE(module == nullptr || type_index < module->types.capacity())) {
      DecodeError<validate>(decoder, pc, "Type index %u is out of bounds",
                            type_index);
      return HeapType(HeapType::kBottom);
    }
    return HeapType(type_index);
  }
}

HeapType consume_heap_type(Decoder* decoder, const WasmModule* module,
                           const WasmFeatures& enabled);

// Read a value type starting at address {pc} using {decoder}.
// No bytes are consumed.
// The length of the read value type is written in {length}.
// Registers an error for an invalid type only if {validate} is not
// kNoValidate.
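// For example, the single byte 0x7F (kI32Code) yields {kWasmI32} with
// {*length} == 1.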
template <Decoder::ValidateFlag validate>
ValueType read_value_type(Decoder* decoder, const byte* pc,
                          uint32_t* const length, const WasmModule* module,
                          const WasmFeatures& enabled) {
  *length = 1;
  byte val = decoder->read_u8<validate>(pc, "value type opcode");
  if (decoder->failed()) {
    *length = 0;
    return kWasmBottom;
  }
  ValueTypeCode code = static_cast<ValueTypeCode>(val);
  switch (code) {
    case kEqRefCode:
    case kI31RefCode:
    case kDataRefCode:
    case kArrayRefCode:
    case kAnyRefCodeAlias:
      if (!VALIDATE(enabled.has_gc())) {
        DecodeError<validate>(
            decoder, pc,
            "invalid value type '%sref', enable with --experimental-wasm-gc",
            HeapType::from_code(code).name().c_str());
        return kWasmBottom;
      }
      V8_FALLTHROUGH;
    case kAnyRefCode:
    case kFuncRefCode: {
      HeapType heap_type = HeapType::from_code(code);
      Nullability nullability =
          code == kI31RefCode || code == kDataRefCode || code == kArrayRefCode
              ? kNonNullable
              : kNullable;
      return ValueType::Ref(heap_type, nullability);
    }
    case kI32Code:
      return kWasmI32;
    case kI64Code:
      return kWasmI64;
    case kF32Code:
      return kWasmF32;
    case kF64Code:
      return kWasmF64;
    case kRefCode:
    case kOptRefCode: {
      Nullability nullability = code == kOptRefCode ? kNullable : kNonNullable;
      if (!VALIDATE(enabled.has_typed_funcref())) {
        DecodeError<validate>(decoder, pc,
                              "Invalid type '(ref%s <heaptype>)', enable with "
                              "--experimental-wasm-typed-funcref",
                              nullability == kNullable ? " null" : "");
        return kWasmBottom;
      }
      HeapType heap_type =
          read_heap_type<validate>(decoder, pc + 1, length, module, enabled);
      *length += 1;
      return heap_type.is_bottom() ? kWasmBottom
                                   : ValueType::Ref(heap_type, nullability);
    }
    // TODO(7748): This is here only for backwards compatibility, and the parsed
    // depth is ignored.
    case kRttWithDepthCode:
    case kRttCode: {
      if (!VALIDATE(enabled.has_gc())) {
        DecodeError<validate>(
            decoder, pc,
            "invalid value type 'rtt', enable with --experimental-wasm-gc");
        return kWasmBottom;
      }
      if (code == kRttWithDepthCode) {
        uint32_t depth = decoder->read_u32v<validate>(pc + 1, length, "depth");
        *length += 1;
        if (!VALIDATE(depth <= kV8MaxRttSubtypingDepth)) {
          DecodeError<validate>(
              decoder, pc,
              "subtyping depth %u is greater than the maximum depth "
              "%u supported by V8",
              depth, kV8MaxRttSubtypingDepth);
          return kWasmBottom;
        }
      }
      uint32_t type_index_length;
      uint32_t type_index =
          decoder->read_u32v<validate>(pc + *length, &type_index_length);
      *length += type_index_length;
      if (!VALIDATE(type_index < kV8MaxWasmTypes)) {
        DecodeError<validate>(
            decoder, pc,
            "Type index %u is greater than the maximum number %zu "
            "of type definitions supported by V8",
            type_index, kV8MaxWasmTypes);
        return kWasmBottom;
      }
      // We use capacity over size so this works mid-DecodeTypeSection.
      if (!VALIDATE(module == nullptr ||
                    type_index < module->types.capacity())) {
        DecodeError<validate>(decoder, pc, "Type index %u is out of bounds",
                              type_index);
        return kWasmBottom;
      }
      return ValueType::Rtt(type_index);
    }
    case kS128Code: {
      if (!VALIDATE(enabled.has_simd())) {
        DecodeError<validate>(
            decoder, pc,
            "invalid value type 's128', enable with --experimental-wasm-simd");
        return kWasmBottom;
      }
      if (!VALIDATE(CheckHardwareSupportsSimd())) {
        DecodeError<validate>(decoder, pc, "Wasm SIMD unsupported");
        return kWasmBottom;
      }
      return kWasmS128;
    }
    // Although these codes are included in ValueTypeCode, they technically
    // do not correspond to value types and are only used in specific
    // contexts. The caller of this function is responsible for handling them.
    case kVoidCode:
    case kI8Code:
    case kI16Code:
      if (validate) {
        DecodeError<validate>(decoder, pc, "invalid value type 0x%x", code);
      }
      return kWasmBottom;
  }
  // Anything that doesn't match an enumeration value is an invalid type code.
  if (validate) {
    DecodeError<validate>(decoder, pc, "invalid value type 0x%x", code);
  }
  return kWasmBottom;
}
}  // namespace value_type_reader

enum DecodingMode { kFunctionBody, kInitExpression };

// Helpers for decoding different kinds of immediates which follow bytecodes.
template <Decoder::ValidateFlag validate>
struct ImmI32Immediate {
  int32_t value;
  uint32_t length;
  ImmI32Immediate(Decoder* decoder, const byte* pc) {
    value = decoder->read_i32v<validate>(pc, &length, "immi32");
  }
};

template <Decoder::ValidateFlag validate>
struct ImmI64Immediate {
  int64_t value;
  uint32_t length;
  ImmI64Immediate(Decoder* decoder, const byte* pc) {
    value = decoder->read_i64v<validate>(pc, &length, "immi64");
  }
};

template <Decoder::ValidateFlag validate>
struct ImmF32Immediate {
  float value;
  uint32_t length = 4;
  ImmF32Immediate(Decoder* decoder, const byte* pc) {
    // We can't use bit_cast here because calling any helper function that
    // returns a float would potentially flip NaN bits per C++ semantics, so we
    // have to inline the memcpy call directly.
    uint32_t tmp = decoder->read_u32<validate>(pc, "immf32");
    memcpy(&value, &tmp, sizeof(value));
  }
};

template <Decoder::ValidateFlag validate>
struct ImmF64Immediate {
  double value;
  uint32_t length = 8;
  ImmF64Immediate(Decoder* decoder, const byte* pc) {
    // Avoid bit_cast because it might not preserve the signalling bit of a NaN.
    uint64_t tmp = decoder->read_u64<validate>(pc, "immf64");
    memcpy(&value, &tmp, sizeof(value));
  }
};

// This is different from IndexImmediate because {index} is a byte.
template <Decoder::ValidateFlag validate>
struct MemoryIndexImmediate {
  uint8_t index = 0;
  uint32_t length = 1;
  MemoryIndexImmediate(Decoder* decoder, const byte* pc) {
    index = decoder->read_u8<validate>(pc, "memory index");
  }
};

// Parent class for all Immediates which read a u32v index value in their
// constructor.
template <Decoder::ValidateFlag validate>
struct IndexImmediate {
  uint32_t index;
  uint32_t length;

  IndexImmediate(Decoder* decoder, const byte* pc, const char* name) {
    index = decoder->read_u32v<validate>(pc, &length, name);
  }
};

template <Decoder::ValidateFlag validate>
struct TagIndexImmediate : public IndexImmediate<validate> {
  const WasmTag* tag = nullptr;

  TagIndexImmediate(Decoder* decoder, const byte* pc)
      : IndexImmediate<validate>(decoder, pc, "tag index") {}
};

template <Decoder::ValidateFlag validate>
struct GlobalIndexImmediate : public IndexImmediate<validate> {
  const WasmGlobal* global = nullptr;

  GlobalIndexImmediate(Decoder* decoder, const byte* pc)
      : IndexImmediate<validate>(decoder, pc, "global index") {}
};

template <Decoder::ValidateFlag validate>
struct StructIndexImmediate : public IndexImmediate<validate> {
  const StructType* struct_type = nullptr;

  StructIndexImmediate(Decoder* decoder, const byte* pc)
      : IndexImmediate<validate>(decoder, pc, "struct index") {}
};

template <Decoder::ValidateFlag validate>
struct ArrayIndexImmediate : public IndexImmediate<validate> {
  const ArrayType* array_type = nullptr;

  ArrayIndexImmediate(Decoder* decoder, const byte* pc)
      : IndexImmediate<validate>(decoder, pc, "array index") {}
};

template <Decoder::ValidateFlag validate>
struct CallFunctionImmediate : public IndexImmediate<validate> {
  const FunctionSig* sig = nullptr;

  CallFunctionImmediate(Decoder* decoder, const byte* pc)
      : IndexImmediate<validate>(decoder, pc, "function index") {}
};

template <Decoder::ValidateFlag validate>
struct SelectTypeImmediate {
  uint32_t length;
  ValueType type;

  SelectTypeImmediate(const WasmFeatures& enabled, Decoder* decoder,
                      const byte* pc, const WasmModule* module) {
    uint8_t num_types =
        decoder->read_u32v<validate>(pc, &length, "number of select types");
    if (!VALIDATE(num_types == 1)) {
      DecodeError<validate>(
          decoder, pc + 1,
          "Invalid number of types. Select accepts exactly one type");
      return;
    }
    uint32_t type_length;
    type = value_type_reader::read_value_type<validate>(
        decoder, pc + length, &type_length, module, enabled);
    length += type_length;
  }
};

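// A block type immediate is either a single negative byte encoding a value
// type (or kVoidCode for no result value), or a non-negative index into the
// module's signature (type) section.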
template <Decoder::ValidateFlag validate>
struct BlockTypeImmediate {
  uint32_t length = 1;
  ValueType type = kWasmVoid;
  uint32_t sig_index = 0;
  const FunctionSig* sig = nullptr;

  BlockTypeImmediate(const WasmFeatures& enabled, Decoder* decoder,
                     const byte* pc, const WasmModule* module) {
    int64_t block_type =
        decoder->read_i33v<validate>(pc, &length, "block type");
    if (block_type < 0) {
      // All valid negative types are 1 byte in length, so we check against the
      // minimum 1-byte LEB128 value.
      constexpr int64_t min_1_byte_leb128 = -64;
      if (!VALIDATE(block_type >= min_1_byte_leb128)) {
        DecodeError<validate>(decoder, pc, "invalid block type %" PRId64,
                              block_type);
        return;
      }
      if (static_cast<ValueTypeCode>(block_type & 0x7F) == kVoidCode) return;
      type = value_type_reader::read_value_type<validate>(decoder, pc, &length,
                                                          module, enabled);
    } else {
      type = kWasmBottom;
      sig_index = static_cast<uint32_t>(block_type);
    }
  }

  uint32_t in_arity() const {
    if (type != kWasmBottom) return 0;
    return static_cast<uint32_t>(sig->parameter_count());
  }
  uint32_t out_arity() const {
    if (type == kWasmVoid) return 0;
    if (type != kWasmBottom) return 1;
    return static_cast<uint32_t>(sig->return_count());
  }
  ValueType in_type(uint32_t index) {
    DCHECK_EQ(kWasmBottom, type);
    return sig->GetParam(index);
  }
  ValueType out_type(uint32_t index) {
    if (type == kWasmBottom) return sig->GetReturn(index);
    DCHECK_NE(kWasmVoid, type);
    DCHECK_EQ(0, index);
    return type;
  }
};

template <Decoder::ValidateFlag validate>
struct BranchDepthImmediate {
  uint32_t depth;
  uint32_t length;
  BranchDepthImmediate(Decoder* decoder, const byte* pc) {
    depth = decoder->read_u32v<validate>(pc, &length, "branch depth");
  }
};

template <Decoder::ValidateFlag validate>
struct FieldImmediate {
  StructIndexImmediate<validate> struct_imm;
  IndexImmediate<validate> field_imm;
  uint32_t length;
  FieldImmediate(Decoder* decoder, const byte* pc)
      : struct_imm(decoder, pc),
        field_imm(decoder, pc + struct_imm.length, "field index"),
        length(struct_imm.length + field_imm.length) {}
};

template <Decoder::ValidateFlag validate>
struct CallIndirectImmediate {
  IndexImmediate<validate> sig_imm;
  IndexImmediate<validate> table_imm;
  uint32_t length;
  const FunctionSig* sig = nullptr;
  CallIndirectImmediate(Decoder* decoder, const byte* pc)
      : sig_imm(decoder, pc, "signature index"),
        table_imm(decoder, pc + sig_imm.length, "table index"),
        length(sig_imm.length + table_imm.length) {}
};

template <Decoder::ValidateFlag validate>
struct BranchTableImmediate {
  uint32_t table_count;
  const byte* start;
  const byte* table;
  BranchTableImmediate(Decoder* decoder, const byte* pc) {
    start = pc;
    uint32_t len = 0;
    table_count = decoder->read_u32v<validate>(pc, &len, "table count");
    table = pc + len;
  }
};

// A helper to iterate over a branch table.
template <Decoder::ValidateFlag validate>
class BranchTableIterator {
 public:
  uint32_t cur_index() { return index_; }
  bool has_next() { return VALIDATE(decoder_->ok()) && index_ <= table_count_; }
  uint32_t next() {
    DCHECK(has_next());
    index_++;
    uint32_t length;
    uint32_t result =
        decoder_->read_u32v<validate>(pc_, &length, "branch table entry");
    pc_ += length;
    return result;
  }
  // length, including the length of the {BranchTableImmediate}, but not the
  // opcode.
  uint32_t length() {
    while (has_next()) next();
    return static_cast<uint32_t>(pc_ - start_);
  }
  const byte* pc() { return pc_; }

  BranchTableIterator(Decoder* decoder,
                      const BranchTableImmediate<validate>& imm)
      : decoder_(decoder),
        start_(imm.start),
        pc_(imm.table),
        table_count_(imm.table_count) {}

 private:
  Decoder* const decoder_;
  const byte* start_;
  const byte* pc_;
  uint32_t index_ = 0;          // the current index.
  const uint32_t table_count_;  // the count of entries, not including default.
};
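
// A typical traversal using the iterator above (a sketch; the surrounding
// decoding and validation code is omitted):
//   BranchTableIterator<validate> iterator(decoder, imm);
//   while (iterator.has_next()) {
//     uint32_t target_depth = iterator.next();
//     // ... validate the branch target ...
//   }
// Note that {has_next()} yields {table_count + 1} entries: the listed targets
// followed by the default target.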

template <Decoder::ValidateFlag validate,
          DecodingMode decoding_mode = kFunctionBody>
class WasmDecoder;

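// Immediate for memory access instructions: an alignment (expressed as its
// base-2 logarithm, per the Wasm binary format) followed by an offset, which
// is a u32 unless memory64 is enabled.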
template <Decoder::ValidateFlag validate>
struct MemoryAccessImmediate {
  uint32_t alignment;
  uint64_t offset;
  uint32_t length = 0;
  MemoryAccessImmediate(Decoder* decoder, const byte* pc,
                        uint32_t max_alignment, bool is_memory64) {
    uint32_t alignment_length;
    alignment =
        decoder->read_u32v<validate>(pc, &alignment_length, "alignment");
    if (!VALIDATE(alignment <= max_alignment)) {
      DecodeError<validate>(
          decoder, pc,
          "invalid alignment; expected maximum alignment is %u, "
          "actual alignment is %u",
          max_alignment, alignment);
    }
    uint32_t offset_length;
    offset = is_memory64 ? decoder->read_u64v<validate>(
                               pc + alignment_length, &offset_length, "offset")
                         : decoder->read_u32v<validate>(
                               pc + alignment_length, &offset_length, "offset");
    length = alignment_length + offset_length;
  }
};

// Immediate for SIMD lane operations.
template <Decoder::ValidateFlag validate>
struct SimdLaneImmediate {
  uint8_t lane;
  uint32_t length = 1;

  SimdLaneImmediate(Decoder* decoder, const byte* pc) {
    lane = decoder->read_u8<validate>(pc, "lane");
  }
};

// Immediate for SIMD S8x16 shuffle operations.
template <Decoder::ValidateFlag validate>
struct Simd128Immediate {
  uint8_t value[kSimd128Size] = {0};

  Simd128Immediate(Decoder* decoder, const byte* pc) {
    for (uint32_t i = 0; i < kSimd128Size; ++i) {
      value[i] = decoder->read_u8<validate>(pc + i, "value");
    }
  }
};

template <Decoder::ValidateFlag validate>
struct MemoryInitImmediate {
  IndexImmediate<validate> data_segment;
  MemoryIndexImmediate<validate> memory;
  uint32_t length;

  MemoryInitImmediate(Decoder* decoder, const byte* pc)
      : data_segment(decoder, pc, "data segment index"),
        memory(decoder, pc + data_segment.length),
        length(data_segment.length + memory.length) {}
};

template <Decoder::ValidateFlag validate>
struct MemoryCopyImmediate {
  MemoryIndexImmediate<validate> memory_src;
  MemoryIndexImmediate<validate> memory_dst;
  uint32_t length;

  MemoryCopyImmediate(Decoder* decoder, const byte* pc)
      : memory_src(decoder, pc),
        memory_dst(decoder, pc + memory_src.length),
        length(memory_src.length + memory_dst.length) {}
};

template <Decoder::ValidateFlag validate>
struct TableInitImmediate {
  IndexImmediate<validate> element_segment;
  IndexImmediate<validate> table;
  uint32_t length;

  TableInitImmediate(Decoder* decoder, const byte* pc)
      : element_segment(decoder, pc, "element segment index"),
        table(decoder, pc + element_segment.length, "table index"),
        length(element_segment.length + table.length) {}
};

template <Decoder::ValidateFlag validate>
struct TableCopyImmediate {
  IndexImmediate<validate> table_dst;
  IndexImmediate<validate> table_src;
  uint32_t length;

  TableCopyImmediate(Decoder* decoder, const byte* pc)
      : table_dst(decoder, pc, "table index"),
        table_src(decoder, pc + table_dst.length, "table index"),
        length(table_src.length + table_dst.length) {}
};

template <Decoder::ValidateFlag validate>
struct HeapTypeImmediate {
  uint32_t length = 1;
  HeapType type;
  HeapTypeImmediate(const WasmFeatures& enabled, Decoder* decoder,
                    const byte* pc, const WasmModule* module)
      : type(value_type_reader::read_heap_type<validate>(decoder, pc, &length,
                                                         module, enabled)) {}
};

template <Decoder::ValidateFlag validate>
struct PcForErrors {
  PcForErrors(const byte* /* pc */) {}

  const byte* pc() const { return nullptr; }
};

template <>
struct PcForErrors<Decoder::kFullValidation> {
  const byte* pc_for_errors = nullptr;

  PcForErrors(const byte* pc) : pc_for_errors(pc) {}

  const byte* pc() const { return pc_for_errors; }
};

// An entry on the value stack.
template <Decoder::ValidateFlag validate>
struct ValueBase : public PcForErrors<validate> {
  ValueType type = kWasmVoid;

  ValueBase(const byte* pc, ValueType type)
      : PcForErrors<validate>(pc), type(type) {}
};

template <typename Value>
struct Merge {
  uint32_t arity = 0;
  union {  // Either multiple values or a single value.
    Value* array;
    Value first;
  } vals = {nullptr};  // Initialize {array} with {nullptr}.

  // Tracks whether this merge was ever reached. Uses precise reachability, like
  // Reachability::kReachable.
  bool reached;

  explicit Merge(bool reached = false) : reached(reached) {}

  Value& operator[](uint32_t i) {
    DCHECK_GT(arity, i);
    return arity == 1 ? vals.first : vals.array[i];
  }
};

enum ControlKind : uint8_t {
  kControlIf,
  kControlIfElse,
  kControlBlock,
  kControlLoop,
  kControlLet,
  kControlTry,
  kControlTryCatch,
  kControlTryCatchAll,
};

enum Reachability : uint8_t {
  // reachable code.
  kReachable,
  // reachable code in unreachable block (implies normal validation).
  kSpecOnlyReachable,
  // code unreachable in its own block (implies polymorphic validation).
  kUnreachable
};

// An entry on the control stack (i.e. if, block, loop, or try).
template <typename Value, Decoder::ValidateFlag validate>
struct ControlBase : public PcForErrors<validate> {
  ControlKind kind = kControlBlock;
  uint32_t locals_count = 0;  // Additional locals introduced in this 'let'.
  uint32_t stack_depth = 0;   // Stack height at the beginning of the construct.
  uint32_t init_stack_depth = 0;  // Height of "locals initialization" stack
                                  // at the beginning of the construct.
  int32_t previous_catch = -1;  // Depth of the innermost catch containing this
                                // 'try'.
  Reachability reachability = kReachable;

  // Values merged into the start or end of this control construct.
  Merge<Value> start_merge;
  Merge<Value> end_merge;

  MOVE_ONLY_NO_DEFAULT_CONSTRUCTOR(ControlBase);

  ControlBase(ControlKind kind, uint32_t locals_count, uint32_t stack_depth,
              uint32_t init_stack_depth, const uint8_t* pc,
              Reachability reachability)
      : PcForErrors<validate>(pc),
        kind(kind),
        locals_count(locals_count),
        stack_depth(stack_depth),
        init_stack_depth(init_stack_depth),
        reachability(reachability),
        start_merge(reachability == kReachable) {
    DCHECK(kind == kControlLet || locals_count == 0);
  }

  // Check whether the current block is reachable.
  bool reachable() const { return reachability == kReachable; }

  // Check whether the rest of the block is unreachable.
  // Note that this is different from {!reachable()}, as there is also the
  // "indirect unreachable state", for which both {reachable()} and
  // {unreachable()} return false.
  bool unreachable() const { return reachability == kUnreachable; }

  // Return the reachability of new control structs started in this block.
  Reachability innerReachability() const {
    return reachability == kReachable ? kReachable : kSpecOnlyReachable;
  }

  bool is_if() const { return is_onearmed_if() || is_if_else(); }
  bool is_onearmed_if() const { return kind == kControlIf; }
  bool is_if_else() const { return kind == kControlIfElse; }
  bool is_block() const { return kind == kControlBlock; }
  bool is_let() const { return kind == kControlLet; }
  bool is_loop() const { return kind == kControlLoop; }
  bool is_incomplete_try() const { return kind == kControlTry; }
  bool is_try_catch() const { return kind == kControlTryCatch; }
  bool is_try_catchall() const { return kind == kControlTryCatchAll; }
  bool is_try() const {
    return is_incomplete_try() || is_try_catch() || is_try_catchall();
  }

  Merge<Value>* br_merge() {
    return is_loop() ? &this->start_merge : &this->end_merge;
  }
};

// This is the list of callback functions that an interface for the
// WasmFullDecoder should implement.
// F(Name, args...)
#define INTERFACE_FUNCTIONS(F)    \
  INTERFACE_META_FUNCTIONS(F)     \
  INTERFACE_CONSTANT_FUNCTIONS(F) \
  INTERFACE_NON_CONSTANT_FUNCTIONS(F)
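
// The interface macros are meant to be expanded by an implementer. As a purely
// illustrative sketch (the DECLARE_CALLBACK name is made up; real interface
// classes may take additional parameters), one method per callback could be
// declared like this:
//   #define DECLARE_CALLBACK(name, ...) void name(__VA_ARGS__);
//   INTERFACE_FUNCTIONS(DECLARE_CALLBACK)
//   #undef DECLARE_CALLBACK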

#define INTERFACE_META_FUNCTIONS(F)    \
  F(StartFunction)                     \
  F(StartFunctionBody, Control* block) \
  F(FinishFunction)                    \
  F(OnFirstError)                      \
  F(NextInstruction, WasmOpcode)       \
  F(Forward, const Value& from, Value* to)

#define INTERFACE_CONSTANT_FUNCTIONS(F)                                   \
  F(I32Const, Value* result, int32_t value)                               \
  F(I64Const, Value* result, int64_t value)                               \
  F(F32Const, Value* result, float value)                                 \
  F(F64Const, Value* result, double value)                                \
  F(S128Const, Simd128Immediate<validate>& imm, Value* result)            \
  F(BinOp, WasmOpcode opcode, const Value& lhs, const Value& rhs,         \
    Value* result)                                                        \
  F(RefNull, ValueType type, Value* result)                               \
  F(RefFunc, uint32_t function_index, Value* result)                      \
  F(GlobalGet, Value* result, const GlobalIndexImmediate<validate>& imm)  \
  F(StructNewWithRtt, const StructIndexImmediate<validate>& imm,          \
    const Value& rtt, const Value args[], Value* result)                  \
  F(StructNewDefault, const StructIndexImmediate<validate>& imm,          \
    const Value& rtt, Value* result)                                      \
  F(ArrayInit, const ArrayIndexImmediate<validate>& imm,                  \
    const base::Vector<Value>& elements, const Value& rtt, Value* result) \
  F(ArrayInitFromData, const ArrayIndexImmediate<validate>& array_imm,    \
    const IndexImmediate<validate>& data_segment, const Value& offset,    \
    const Value& length, const Value& rtt, Value* result)                 \
  F(RttCanon, uint32_t type_index, Value* result)                         \
  F(DoReturn, uint32_t drop_values)

#define INTERFACE_NON_CONSTANT_FUNCTIONS(F) /*       force 80 columns       */ \
  /* Control: */                                                               \
  F(Block, Control* block)                                                     \
  F(Loop, Control* block)                                                      \
  F(Try, Control* block)                                                       \
  F(If, const Value& cond, Control* if_block)                                  \
  F(FallThruTo, Control* c)                                                    \
  F(PopControl, Control* block)                                                \
  /* Instructions: */                                                          \
  F(UnOp, WasmOpcode opcode, const Value& value, Value* result)                \
  F(RefAsNonNull, const Value& arg, Value* result)                             \
  F(Drop)                                                                      \
  F(LocalGet, Value* result, const IndexImmediate<validate>& imm)              \
  F(LocalSet, const Value& value, const IndexImmediate<validate>& imm)         \
  F(LocalTee, const Value& value, Value* result,                               \
    const IndexImmediate<validate>& imm)                                       \
  F(AllocateLocals, base::Vector<Value> local_values)                          \
  F(DeallocateLocals, uint32_t count)                                          \
  F(GlobalSet, const Value& value, const GlobalIndexImmediate<validate>& imm)  \
  F(TableGet, const Value& index, Value* result,                               \
    const IndexImmediate<validate>& imm)                                       \
  F(TableSet, const Value& index, const Value& value,                          \
    const IndexImmediate<validate>& imm)                                       \
  F(Trap, TrapReason reason)                                                   \
  F(NopForTestingUnsupportedInLiftoff)                                         \
  F(Select, const Value& cond, const Value& fval, const Value& tval,           \
    Value* result)                                                             \
  F(BrOrRet, uint32_t depth, uint32_t drop_values)                             \
  F(BrIf, const Value& cond, uint32_t depth)                                   \
  F(BrTable, const BranchTableImmediate<validate>& imm, const Value& key)      \
  F(Else, Control* if_block)                                                   \
  F(LoadMem, LoadType type, const MemoryAccessImmediate<validate>& imm,        \
    const Value& index, Value* result)                                         \
  F(LoadTransform, LoadType type, LoadTransformationKind transform,            \
    const MemoryAccessImmediate<validate>& imm, const Value& index,            \
    Value* result)                                                             \
  F(LoadLane, LoadType type, const Value& value, const Value& index,           \
    const MemoryAccessImmediate<validate>& imm, const uint8_t laneidx,         \
    Value* result)                                                             \
  F(StoreMem, StoreType type, const MemoryAccessImmediate<validate>& imm,      \
    const Value& index, const Value& value)                                    \
  F(StoreLane, StoreType type, const MemoryAccessImmediate<validate>& imm,     \
    const Value& index, const Value& value, const uint8_t laneidx)             \
  F(CurrentMemoryPages, Value* result)                                         \
  F(MemoryGrow, const Value& value, Value* result)                             \
  F(CallDirect, const CallFunctionImmediate<validate>& imm,                    \
    const Value args[], Value returns[])                                       \
  F(CallIndirect, const Value& index,                                          \
    const CallIndirectImmediate<validate>& imm, const Value args[],            \
    Value returns[])                                                           \
  F(CallRef, const Value& func_ref, const FunctionSig* sig,                    \
    uint32_t sig_index, const Value args[], const Value returns[])             \
  F(ReturnCallRef, const Value& func_ref, const FunctionSig* sig,              \
    uint32_t sig_index, const Value args[])                                    \
  F(ReturnCall, const CallFunctionImmediate<validate>& imm,                    \
    const Value args[])                                                        \
  F(ReturnCallIndirect, const Value& index,                                    \
    const CallIndirectImmediate<validate>& imm, const Value args[])            \
  F(BrOnNull, const Value& ref_object, uint32_t depth,                         \
    bool pass_null_along_branch, Value* result_on_fallthrough)                 \
  F(BrOnNonNull, const Value& ref_object, uint32_t depth)                      \
  F(SimdOp, WasmOpcode opcode, base::Vector<Value> args, Value* result)        \
  F(SimdLaneOp, WasmOpcode opcode, const SimdLaneImmediate<validate>& imm,     \
    const base::Vector<Value> inputs, Value* result)                           \
  F(S128Const, const Simd128Immediate<validate>& imm, Value* result)           \
  F(Simd8x16ShuffleOp, const Simd128Immediate<validate>& imm,                  \
    const Value& input0, const Value& input1, Value* result)                   \
  F(Throw, const TagIndexImmediate<validate>& imm,                             \
    const base::Vector<Value>& args)                                           \
  F(Rethrow, Control* block)                                                   \
  F(CatchException, const TagIndexImmediate<validate>& imm, Control* block,    \
    base::Vector<Value> caught_values)                                         \
  F(Delegate, uint32_t depth, Control* block)                                  \
  F(CatchAll, Control* block)                                                  \
  F(AtomicOp, WasmOpcode opcode, base::Vector<Value> args,                     \
    const MemoryAccessImmediate<validate>& imm, Value* result)                 \
  F(AtomicFence)                                                               \
  F(MemoryInit, const MemoryInitImmediate<validate>& imm, const Value& dst,    \
    const Value& src, const Value& size)                                       \
  F(DataDrop, const IndexImmediate<validate>& imm)                             \
  F(MemoryCopy, const MemoryCopyImmediate<validate>& imm, const Value& dst,    \
    const Value& src, const Value& size)                                       \
  F(MemoryFill, const MemoryIndexImmediate<validate>& imm, const Value& dst,   \
    const Value& value, const Value& size)                                     \
  F(TableInit, const TableInitImmediate<validate>& imm,                        \
    base::Vector<Value> args)                                                  \
  F(ElemDrop, const IndexImmediate<validate>& imm)                             \
  F(TableCopy, const TableCopyImmediate<validate>& imm,                        \
    base::Vector<Value> args)                                                  \
  F(TableGrow, const IndexImmediate<validate>& imm, const Value& value,        \
    const Value& delta, Value* result)                                         \
  F(TableSize, const IndexImmediate<validate>& imm, Value* result)             \
  F(TableFill, const IndexImmediate<validate>& imm, const Value& start,        \
    const Value& value, const Value& count)                                    \
  F(StructGet, const Value& struct_object,                                     \
    const FieldImmediate<validate>& field, bool is_signed, Value* result)      \
  F(StructSet, const Value& struct_object,                                     \
    const FieldImmediate<validate>& field, const Value& field_value)           \
  F(ArrayNewWithRtt, const ArrayIndexImmediate<validate>& imm,                 \
    const Value& length, const Value& initial_value, const Value& rtt,         \
    Value* result)                                                             \
  F(ArrayNewDefault, const ArrayIndexImmediate<validate>& imm,                 \
    const Value& length, const Value& rtt, Value* result)                      \
  F(ArrayGet, const Value& array_obj,                                          \
    const ArrayIndexImmediate<validate>& imm, const Value& index,              \
    bool is_signed, Value* result)                                             \
  F(ArraySet, const Value& array_obj,                                          \
    const ArrayIndexImmediate<validate>& imm, const Value& index,              \
    const Value& value)                                                        \
  F(ArrayLen, const Value& array_obj, Value* result)                           \
  F(ArrayCopy, const Value& src, const Value& src_index, const Value& dst,     \
    const Value& dst_index, const Value& length)                               \
  F(I31New, const Value& input, Value* result)                                 \
  F(I31GetS, const Value& input, Value* result)                                \
  F(I31GetU, const Value& input, Value* result)                                \
  F(RefTest, const Value& obj, const Value& rtt, Value* result)                \
  F(RefCast, const Value& obj, const Value& rtt, Value* result)                \
  F(AssertNull, const Value& obj, Value* result)                               \
  F(BrOnCast, const Value& obj, const Value& rtt, Value* result_on_branch,     \
    uint32_t depth)                                                            \
  F(BrOnCastFail, const Value& obj, const Value& rtt,                          \
    Value* result_on_fallthrough, uint32_t depth)                              \
  F(RefIsFunc, const Value& object, Value* result)                             \
  F(RefIsData, const Value& object, Value* result)                             \
  F(RefIsI31, const Value& object, Value* result)                              \
  F(RefIsArray, const Value& object, Value* result)                            \
  F(RefAsFunc, const Value& object, Value* result)                             \
  F(RefAsData, const Value& object, Value* result)                             \
  F(RefAsI31, const Value& object, Value* result)                              \
  F(RefAsArray, const Value& object, Value* result)                            \
  F(BrOnFunc, const Value& object, Value* value_on_branch, uint32_t br_depth)  \
  F(BrOnData, const Value& object, Value* value_on_branch, uint32_t br_depth)  \
  F(BrOnI31, const Value& object, Value* value_on_branch, uint32_t br_depth)   \
  F(BrOnArray, const Value& object, Value* value_on_branch, uint32_t br_depth) \
  F(BrOnNonFunc, const Value& object, Value* value_on_fallthrough,             \
    uint32_t br_depth)                                                         \
  F(BrOnNonData, const Value& object, Value* value_on_fallthrough,             \
    uint32_t br_depth)                                                         \
  F(BrOnNonI31, const Value& object, Value* value_on_fallthrough,              \
    uint32_t br_depth)                                                         \
  F(BrOnNonArray, const Value& object, Value* value_on_fallthrough,            \
    uint32_t br_depth)

// Generic Wasm bytecode decoder with utilities for decoding immediates,
// lengths, etc.
template <Decoder::ValidateFlag validate, DecodingMode decoding_mode>
class WasmDecoder : public Decoder {
 public:
  WasmDecoder(Zone* zone, const WasmModule* module, const WasmFeatures& enabled,
              WasmFeatures* detected, const FunctionSig* sig, const byte* start,
              const byte* end, uint32_t buffer_offset = 0)
      : Decoder(start, end, buffer_offset),
        local_types_(zone),
        initialized_locals_(zone),
        locals_initializers_stack_(zone),
        module_(module),
        enabled_(enabled),
        detected_(detected),
        sig_(sig) {}

  Zone* zone() const { return local_types_.get_allocator().zone(); }

  uint32_t num_locals() const {
    DCHECK_EQ(num_locals_, local_types_.size());
    return num_locals_;
  }

  ValueType local_type(uint32_t index) const { return local_types_[index]; }

  void InitializeLocalsFromSig() {
    DCHECK_NOT_NULL(sig_);
    DCHECK_EQ(0, this->local_types_.size());
    local_types_.assign(sig_->parameters().begin(), sig_->parameters().end());
    num_locals_ = static_cast<uint32_t>(sig_->parameters().size());
  }

  // Decodes local definitions in the current decoder.
  // Returns the number of newly defined locals, or -1 if decoding failed.
  // Writes the total length of decoded locals in {total_length}.
  // If {insert_position} is defined, the decoded locals will be inserted into
  // the {this->local_types_}. The decoder's pc is not advanced.
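  // The wire format is a count of entries followed by (count, value type)
  // pairs; e.g. the bytes {0x02, 0x03, 0x7F, 0x01, 0x7E} declare two entries:
  // three i32 locals followed by one i64 local.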
  int DecodeLocals(const byte* pc, uint32_t* total_length,
                   const base::Optional<uint32_t> insert_position) {
    uint32_t length;
    *total_length = 0;
    int total_count = 0;

    // The 'else' value is never used; it is only there for convenience.
1118     auto insert_iterator = insert_position.has_value()
1119                                ? local_types_.begin() + insert_position.value()
1120                                : local_types_.begin();
1121 
1122     // Decode local declarations, if any.
1123     uint32_t entries = read_u32v<validate>(pc, &length, "local decls count");
1124     if (!VALIDATE(ok())) {
1125       DecodeError(pc + *total_length, "invalid local decls count");
1126       return -1;
1127     }
1128     *total_length += length;
1129     TRACE("local decls count: %u\n", entries);
1130 
1131     while (entries-- > 0) {
1132       if (!VALIDATE(more())) {
1133         DecodeError(end(),
1134                     "expected more local decls but reached end of input");
1135         return -1;
1136       }
1137 
1138       uint32_t count =
1139           read_u32v<validate>(pc + *total_length, &length, "local count");
1140       if (!VALIDATE(ok())) {
1141         DecodeError(pc + *total_length, "invalid local count");
1142         return -1;
1143       }
1144       DCHECK_LE(local_types_.size(), kV8MaxWasmFunctionLocals);
1145       if (!VALIDATE(count <= kV8MaxWasmFunctionLocals - local_types_.size())) {
1146         DecodeError(pc + *total_length, "local count too large");
1147         return -1;
1148       }
1149       *total_length += length;
1150 
1151       ValueType type = value_type_reader::read_value_type<validate>(
1152           this, pc + *total_length, &length, this->module_, enabled_);
1153       if (!VALIDATE(type != kWasmBottom)) return -1;
1154       *total_length += length;
1155       total_count += count;
1156 
1157       if (insert_position.has_value()) {
1158         // Move the insertion iterator to the end of the newly inserted locals.
1159         insert_iterator =
1160             local_types_.insert(insert_iterator, count, type) + count;
1161         num_locals_ += count;
1162       }
1163     }
1164 
1165     DCHECK(ok());
1166     return total_count;
1167   }
1168 
1169   // Shorthand that forwards to the {DecodeError} functions above, passing our
1170   // {validate} flag.
1171   template <typename... Args>
DecodeError(Args.... args)1172   void DecodeError(Args... args) {
1173     wasm::DecodeError<validate>(this, std::forward<Args>(args)...);
1174   }
1175 
1176   // Returns a BitVector of length {locals_count + 1} representing the set of
1177   // variables that are assigned in the loop starting at {pc}. The additional
1178   // position at the end of the vector represents possible assignments to
1179   // the instance cache.
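  // Rough example: for a loop body containing {local.set 3} followed by a
  // {call}, the returned BitVector has bit 3 set (the assigned local) and bit
  // {locals_count} set (calls may update the instance cache).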
  static BitVector* AnalyzeLoopAssignment(WasmDecoder* decoder, const byte* pc,
                                          uint32_t locals_count, Zone* zone) {
1182     if (pc >= decoder->end()) return nullptr;
1183     if (*pc != kExprLoop) return nullptr;
    // {locals_count} is augmented by 1 so that the 'locals_count' index can
    // be used to track the instance cache.
1186     BitVector* assigned = zone->New<BitVector>(locals_count + 1, zone);
1187     int depth = -1;  // We will increment the depth to 0 when we decode the
1188                      // starting 'loop' opcode.
1189     // Since 'let' can add additional locals at the beginning of the locals
1190     // index space, we need to track this offset for every depth up to the
1191     // current depth.
1192     base::SmallVector<uint32_t, 8> local_offsets(8);
1193     // Iteratively process all AST nodes nested inside the loop.
1194     while (pc < decoder->end() && VALIDATE(decoder->ok())) {
1195       WasmOpcode opcode = static_cast<WasmOpcode>(*pc);
1196       switch (opcode) {
1197         case kExprLoop:
1198         case kExprIf:
1199         case kExprBlock:
1200         case kExprTry:
1201           depth++;
1202           local_offsets.resize_no_init(depth + 1);
1203           // No additional locals.
1204           local_offsets[depth] = depth > 0 ? local_offsets[depth - 1] : 0;
1205           break;
1206         case kExprLet: {
1207           depth++;
1208           local_offsets.resize_no_init(depth + 1);
1209           BlockTypeImmediate<validate> imm(WasmFeatures::All(), decoder, pc + 1,
1210                                            nullptr);
1211           uint32_t locals_length;
1212           int new_locals_count = decoder->DecodeLocals(
1213               pc + 1 + imm.length, &locals_length, base::Optional<uint32_t>());
1214           local_offsets[depth] = local_offsets[depth - 1] + new_locals_count;
1215           break;
1216         }
1217         case kExprLocalSet:
1218         case kExprLocalTee: {
1219           IndexImmediate<validate> imm(decoder, pc + 1, "local index");
1220           // Unverified code might have an out-of-bounds index.
1221           if (imm.index >= local_offsets[depth] &&
1222               imm.index - local_offsets[depth] < locals_count) {
1223             assigned->Add(imm.index - local_offsets[depth]);
1224           }
1225           break;
1226         }
1227         case kExprMemoryGrow:
1228         case kExprCallFunction:
1229         case kExprCallIndirect:
1230         case kExprCallRef:
1231           // Add instance cache to the assigned set.
1232           assigned->Add(locals_count);
1233           break;
1234         case kExprEnd:
1235           depth--;
1236           break;
1237         default:
1238           break;
1239       }
1240       if (depth < 0) break;
1241       pc += OpcodeLength(decoder, pc);
1242     }
1243     return VALIDATE(decoder->ok()) ? assigned : nullptr;
1244   }
1245 
  bool Validate(const byte* pc, TagIndexImmediate<validate>& imm) {
1247     if (!VALIDATE(imm.index < module_->tags.size())) {
1248       DecodeError(pc, "Invalid tag index: %u", imm.index);
1249       return false;
1250     }
1251     imm.tag = &module_->tags[imm.index];
1252     return true;
1253   }
1254 
  bool Validate(const byte* pc, GlobalIndexImmediate<validate>& imm) {
1256     // We compare with the current size of the globals vector. This is important
1257     // if we are decoding a constant expression in the global section.
1258     if (!VALIDATE(imm.index < module_->globals.size())) {
1259       DecodeError(pc, "Invalid global index: %u", imm.index);
1260       return false;
1261     }
1262     imm.global = &module_->globals[imm.index];
1263 
1264     if (decoding_mode == kInitExpression) {
1265       if (!VALIDATE(!imm.global->mutability)) {
1266         this->DecodeError(pc,
1267                           "mutable globals cannot be used in initializer "
1268                           "expressions");
1269         return false;
1270       }
1271       if (!VALIDATE(imm.global->imported || this->enabled_.has_gc())) {
1272         this->DecodeError(
1273             pc,
1274             "non-imported globals cannot be used in initializer expressions");
1275         return false;
1276       }
1277     }
1278 
1279     return true;
1280   }
1281 
  bool Validate(const byte* pc, StructIndexImmediate<validate>& imm) {
1283     if (!VALIDATE(module_->has_struct(imm.index))) {
1284       DecodeError(pc, "invalid struct index: %u", imm.index);
1285       return false;
1286     }
1287     imm.struct_type = module_->struct_type(imm.index);
1288     return true;
1289   }
1290 
  bool Validate(const byte* pc, FieldImmediate<validate>& imm) {
1292     if (!Validate(pc, imm.struct_imm)) return false;
1293     if (!VALIDATE(imm.field_imm.index <
1294                   imm.struct_imm.struct_type->field_count())) {
1295       DecodeError(pc + imm.struct_imm.length, "invalid field index: %u",
1296                   imm.field_imm.index);
1297       return false;
1298     }
1299     return true;
1300   }
1301 
  bool Validate(const byte* pc, ArrayIndexImmediate<validate>& imm) {
1303     if (!VALIDATE(module_->has_array(imm.index))) {
1304       DecodeError(pc, "invalid array index: %u", imm.index);
1305       return false;
1306     }
1307     imm.array_type = module_->array_type(imm.index);
1308     return true;
1309   }
1310 
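  // A return call can only replace the current frame if the callee's return
  // types are pairwise subtypes of the caller's return types, since the
  // callee's results are handed directly back to our caller.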
  bool CanReturnCall(const FunctionSig* target_sig) {
1312     if (sig_->return_count() != target_sig->return_count()) return false;
1313     auto target_sig_it = target_sig->returns().begin();
1314     for (ValueType ret_type : sig_->returns()) {
1315       if (!IsSubtypeOf(*target_sig_it++, ret_type, this->module_)) return false;
1316     }
1317     return true;
1318   }
1319 
  bool Validate(const byte* pc, CallFunctionImmediate<validate>& imm) {
1321     if (!VALIDATE(imm.index < module_->functions.size())) {
1322       DecodeError(pc, "function index #%u is out of bounds", imm.index);
1323       return false;
1324     }
1325     imm.sig = module_->functions[imm.index].sig;
1326     return true;
1327   }
1328 
  bool Validate(const byte* pc, CallIndirectImmediate<validate>& imm) {
1330     if (!ValidateSignature(pc, imm.sig_imm)) return false;
1331     if (!ValidateTable(pc + imm.sig_imm.length, imm.table_imm)) {
1332       return false;
1333     }
1334     ValueType table_type = module_->tables[imm.table_imm.index].type;
1335     if (!VALIDATE(IsSubtypeOf(table_type, kWasmFuncRef, module_))) {
1336       DecodeError(
1337           pc, "call_indirect: immediate table #%u is not of a function type",
1338           imm.table_imm.index);
1339       return false;
1340     }
1341 
1342     // Check that the dynamic signature for this call is a subtype of the static
1343     // type of the table the function is defined in.
1344     ValueType immediate_type = ValueType::Ref(imm.sig_imm.index, kNonNullable);
1345     if (!VALIDATE(IsSubtypeOf(immediate_type, table_type, module_))) {
1346       DecodeError(pc,
1347                   "call_indirect: Immediate signature #%u is not a subtype of "
1348                   "immediate table #%u",
1349                   imm.sig_imm.index, imm.table_imm.index);
1350       return false;
1351     }
1352 
1353     imm.sig = module_->signature(imm.sig_imm.index);
1354     return true;
1355   }
1356 
  bool Validate(const byte* pc, BranchDepthImmediate<validate>& imm,
                size_t control_depth) {
1359     if (!VALIDATE(imm.depth < control_depth)) {
1360       DecodeError(pc, "invalid branch depth: %u", imm.depth);
1361       return false;
1362     }
1363     return true;
1364   }
1365 
  bool Validate(const byte* pc, BranchTableImmediate<validate>& imm,
                size_t block_depth) {
1368     if (!VALIDATE(imm.table_count <= kV8MaxWasmFunctionBrTableSize)) {
1369       DecodeError(pc, "invalid table count (> max br_table size): %u",
1370                   imm.table_count);
1371       return false;
1372     }
1373     return checkAvailable(imm.table_count);
1374   }
1375 
  bool Validate(const byte* pc, WasmOpcode opcode,
                SimdLaneImmediate<validate>& imm) {
1378     uint8_t num_lanes = 0;
1379     switch (opcode) {
1380       case kExprF64x2ExtractLane:
1381       case kExprF64x2ReplaceLane:
1382       case kExprI64x2ExtractLane:
1383       case kExprI64x2ReplaceLane:
1384       case kExprS128Load64Lane:
1385       case kExprS128Store64Lane:
1386         num_lanes = 2;
1387         break;
1388       case kExprF32x4ExtractLane:
1389       case kExprF32x4ReplaceLane:
1390       case kExprI32x4ExtractLane:
1391       case kExprI32x4ReplaceLane:
1392       case kExprS128Load32Lane:
1393       case kExprS128Store32Lane:
1394         num_lanes = 4;
1395         break;
1396       case kExprI16x8ExtractLaneS:
1397       case kExprI16x8ExtractLaneU:
1398       case kExprI16x8ReplaceLane:
1399       case kExprS128Load16Lane:
1400       case kExprS128Store16Lane:
1401         num_lanes = 8;
1402         break;
1403       case kExprI8x16ExtractLaneS:
1404       case kExprI8x16ExtractLaneU:
1405       case kExprI8x16ReplaceLane:
1406       case kExprS128Load8Lane:
1407       case kExprS128Store8Lane:
1408         num_lanes = 16;
1409         break;
1410       default:
1411         UNREACHABLE();
1412         break;
1413     }
1414     if (!VALIDATE(imm.lane >= 0 && imm.lane < num_lanes)) {
1415       DecodeError(pc, "invalid lane index");
1416       return false;
1417     } else {
1418       return true;
1419     }
1420   }
1421 
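  // Used to validate i8x16.shuffle masks: each of the 16 output lanes selects
  // one of the 32 byte lanes of the two concatenated input vectors, hence the
  // [0, 31] bound checked below.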
  bool Validate(const byte* pc, Simd128Immediate<validate>& imm) {
1423     uint8_t max_lane = 0;
1424     for (uint32_t i = 0; i < kSimd128Size; ++i) {
1425       max_lane = std::max(max_lane, imm.value[i]);
1426     }
1427     // Shuffle indices must be in [0..31] for a 16 lane shuffle.
1428     if (!VALIDATE(max_lane < 2 * kSimd128Size)) {
1429       DecodeError(pc, "invalid shuffle mask");
1430       return false;
1431     }
1432     return true;
1433   }
1434 
  bool Validate(const byte* pc, BlockTypeImmediate<validate>& imm) {
1436     if (imm.type != kWasmBottom) return true;
1437     if (!VALIDATE(module_->has_signature(imm.sig_index))) {
1438       DecodeError(pc, "block type index %u is not a signature definition",
1439                   imm.sig_index);
1440       return false;
1441     }
1442     imm.sig = module_->signature(imm.sig_index);
1443     return true;
1444   }
1445 
  bool Validate(const byte* pc, MemoryIndexImmediate<validate>& imm) {
1447     if (!VALIDATE(this->module_->has_memory)) {
1448       this->DecodeError(pc, "memory instruction with no memory");
1449       return false;
1450     }
1451     if (!VALIDATE(imm.index == uint8_t{0})) {
1452       DecodeError(pc, "expected memory index 0, found %u", imm.index);
1453       return false;
1454     }
1455     return true;
1456   }
1457 
  bool Validate(const byte* pc, MemoryAccessImmediate<validate>& imm) {
1459     if (!VALIDATE(this->module_->has_memory)) {
1460       this->DecodeError(pc, "memory instruction with no memory");
1461       return false;
1462     }
1463     return true;
1464   }
1465 
  bool Validate(const byte* pc, MemoryInitImmediate<validate>& imm) {
1467     return ValidateDataSegment(pc, imm.data_segment) &&
1468            Validate(pc + imm.data_segment.length, imm.memory);
1469   }
1470 
  bool Validate(const byte* pc, MemoryCopyImmediate<validate>& imm) {
1472     return Validate(pc, imm.memory_src) &&
1473            Validate(pc + imm.memory_src.length, imm.memory_dst);
1474   }
1475 
  bool Validate(const byte* pc, TableInitImmediate<validate>& imm) {
1477     if (!ValidateElementSegment(pc, imm.element_segment)) return false;
1478     if (!ValidateTable(pc + imm.element_segment.length, imm.table)) {
1479       return false;
1480     }
1481     ValueType elem_type =
1482         module_->elem_segments[imm.element_segment.index].type;
1483     if (!VALIDATE(IsSubtypeOf(elem_type, module_->tables[imm.table.index].type,
1484                               module_))) {
1485       DecodeError(pc, "table %u is not a super-type of %s", imm.table.index,
1486                   elem_type.name().c_str());
1487       return false;
1488     }
1489     return true;
1490   }
1491 
  bool Validate(const byte* pc, TableCopyImmediate<validate>& imm) {
1493     if (!ValidateTable(pc, imm.table_src)) return false;
1494     if (!ValidateTable(pc + imm.table_src.length, imm.table_dst)) return false;
1495     ValueType src_type = module_->tables[imm.table_src.index].type;
1496     if (!VALIDATE(IsSubtypeOf(
1497             src_type, module_->tables[imm.table_dst.index].type, module_))) {
1498       DecodeError(pc, "table %u is not a super-type of %s", imm.table_dst.index,
1499                   src_type.name().c_str());
1500       return false;
1501     }
1502     return true;
1503   }
1504 
1505   // The following Validate* functions all validate an IndexImmediate, albeit
1506   // differently according to context.
  bool ValidateTable(const byte* pc, IndexImmediate<validate>& imm) {
1508     if (imm.index > 0 || imm.length > 1) {
1509       this->detected_->Add(kFeature_reftypes);
1510     }
1511     if (!VALIDATE(imm.index < module_->tables.size())) {
1512       DecodeError(pc, "invalid table index: %u", imm.index);
1513       return false;
1514     }
1515     return true;
1516   }
1517 
  bool ValidateElementSegment(const byte* pc, IndexImmediate<validate>& imm) {
1519     if (!VALIDATE(imm.index < module_->elem_segments.size())) {
1520       DecodeError(pc, "invalid element segment index: %u", imm.index);
1521       return false;
1522     }
1523     return true;
1524   }
1525 
  bool ValidateLocal(const byte* pc, IndexImmediate<validate>& imm) {
1527     if (!VALIDATE(imm.index < num_locals())) {
1528       DecodeError(pc, "invalid local index: %u", imm.index);
1529       return false;
1530     }
1531     return true;
1532   }
1533 
  bool ValidateType(const byte* pc, IndexImmediate<validate>& imm) {
1535     if (!VALIDATE(module_->has_type(imm.index))) {
1536       DecodeError(pc, "invalid type index: %u", imm.index);
1537       return false;
1538     }
1539     return true;
1540   }
1541 
  bool ValidateSignature(const byte* pc, IndexImmediate<validate>& imm) {
1543     if (!VALIDATE(module_->has_signature(imm.index))) {
1544       DecodeError(pc, "invalid signature index: %u", imm.index);
1545       return false;
1546     }
1547     return true;
1548   }
1549 
  bool ValidateFunction(const byte* pc, IndexImmediate<validate>& imm) {
1551     if (!VALIDATE(imm.index < module_->functions.size())) {
1552       DecodeError(pc, "function index #%u is out of bounds", imm.index);
1553       return false;
1554     }
1555     if (decoding_mode == kFunctionBody &&
1556         !VALIDATE(module_->functions[imm.index].declared)) {
1557       DecodeError(pc, "undeclared reference to function #%u", imm.index);
1558       return false;
1559     }
1560     return true;
1561   }
1562 
  bool ValidateDataSegment(const byte* pc, IndexImmediate<validate>& imm) {
1564     if (!VALIDATE(imm.index < module_->num_declared_data_segments)) {
1565       DecodeError(pc, "invalid data segment index: %u", imm.index);
1566       return false;
1567     }
1568     return true;
1569   }
1570 
1571   // Returns the length of the opcode under {pc}.
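  // A typical use is linearly skipping over instructions, as done in
  // {AnalyzeLoopAssignment} above, e.g.:
  //   for (const byte* p = pc; p < decoder->end();
  //        p += OpcodeLength(decoder, p)) { /* ... */ }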
  static uint32_t OpcodeLength(WasmDecoder* decoder, const byte* pc) {
1573     WasmOpcode opcode = static_cast<WasmOpcode>(*pc);
1574     // We don't have information about the module here, so we just assume that
1575     // memory64 is enabled when parsing memory access immediates. This is
1576     // backwards-compatible; decode errors will be detected at another time when
1577     // actually decoding that opcode.
1578     constexpr bool kConservativelyAssumeMemory64 = true;
1579     switch (opcode) {
1580       /********** Control opcodes **********/
1581       case kExprUnreachable:
1582       case kExprNop:
1583       case kExprNopForTestingUnsupportedInLiftoff:
1584       case kExprElse:
1585       case kExprEnd:
1586       case kExprReturn:
1587         return 1;
1588       case kExprTry:
1589       case kExprIf:
1590       case kExprLoop:
1591       case kExprBlock: {
1592         BlockTypeImmediate<validate> imm(WasmFeatures::All(), decoder, pc + 1,
1593                                          nullptr);
1594         return 1 + imm.length;
1595       }
1596       case kExprRethrow:
1597       case kExprBr:
1598       case kExprBrIf:
1599       case kExprBrOnNull:
1600       case kExprBrOnNonNull:
1601       case kExprDelegate: {
1602         BranchDepthImmediate<validate> imm(decoder, pc + 1);
1603         return 1 + imm.length;
1604       }
1605       case kExprBrTable: {
1606         BranchTableImmediate<validate> imm(decoder, pc + 1);
1607         BranchTableIterator<validate> iterator(decoder, imm);
1608         return 1 + iterator.length();
1609       }
1610       case kExprThrow:
1611       case kExprCatch: {
1612         TagIndexImmediate<validate> imm(decoder, pc + 1);
1613         return 1 + imm.length;
1614       }
1615       case kExprLet: {
1616         BlockTypeImmediate<validate> imm(WasmFeatures::All(), decoder, pc + 1,
1617                                          nullptr);
1618         uint32_t locals_length;
1619         int new_locals_count = decoder->DecodeLocals(
1620             pc + 1 + imm.length, &locals_length, base::Optional<uint32_t>());
1621         return 1 + imm.length + ((new_locals_count >= 0) ? locals_length : 0);
1622       }
1623 
1624       /********** Misc opcodes **********/
1625       case kExprCallFunction:
1626       case kExprReturnCall: {
1627         CallFunctionImmediate<validate> imm(decoder, pc + 1);
1628         return 1 + imm.length;
1629       }
1630       case kExprCallIndirect:
1631       case kExprReturnCallIndirect: {
1632         CallIndirectImmediate<validate> imm(decoder, pc + 1);
1633         return 1 + imm.length;
1634       }
1635       case kExprCallRef:
1636       case kExprReturnCallRef:
1637       case kExprDrop:
1638       case kExprSelect:
1639       case kExprCatchAll:
1640         return 1;
1641       case kExprSelectWithType: {
1642         SelectTypeImmediate<validate> imm(WasmFeatures::All(), decoder, pc + 1,
1643                                           nullptr);
1644         return 1 + imm.length;
1645       }
1646 
1647       case kExprLocalGet:
1648       case kExprLocalSet:
1649       case kExprLocalTee: {
1650         IndexImmediate<validate> imm(decoder, pc + 1, "local index");
1651         return 1 + imm.length;
1652       }
1653       case kExprGlobalGet:
1654       case kExprGlobalSet: {
1655         GlobalIndexImmediate<validate> imm(decoder, pc + 1);
1656         return 1 + imm.length;
1657       }
1658       case kExprTableGet:
1659       case kExprTableSet: {
1660         IndexImmediate<validate> imm(decoder, pc + 1, "table index");
1661         return 1 + imm.length;
1662       }
1663       case kExprI32Const: {
1664         ImmI32Immediate<validate> imm(decoder, pc + 1);
1665         return 1 + imm.length;
1666       }
1667       case kExprI64Const: {
1668         ImmI64Immediate<validate> imm(decoder, pc + 1);
1669         return 1 + imm.length;
1670       }
1671       case kExprF32Const:
1672         return 5;
1673       case kExprF64Const:
1674         return 9;
1675       case kExprRefNull: {
1676         HeapTypeImmediate<validate> imm(WasmFeatures::All(), decoder, pc + 1,
1677                                         nullptr);
1678         return 1 + imm.length;
1679       }
1680       case kExprRefIsNull: {
1681         return 1;
1682       }
1683       case kExprRefFunc: {
1684         IndexImmediate<validate> imm(decoder, pc + 1, "function index");
1685         return 1 + imm.length;
1686       }
1687       case kExprRefAsNonNull:
1688         return 1;
1689 
1690 #define DECLARE_OPCODE_CASE(name, ...) case kExpr##name:
1691         // clang-format off
1692       /********** Simple and memory opcodes **********/
1693       FOREACH_SIMPLE_OPCODE(DECLARE_OPCODE_CASE)
1694       FOREACH_SIMPLE_PROTOTYPE_OPCODE(DECLARE_OPCODE_CASE)
1695         return 1;
1696       FOREACH_LOAD_MEM_OPCODE(DECLARE_OPCODE_CASE)
1697       FOREACH_STORE_MEM_OPCODE(DECLARE_OPCODE_CASE) {
1698         MemoryAccessImmediate<validate> imm(decoder, pc + 1, UINT32_MAX,
1699                                             kConservativelyAssumeMemory64);
1700         return 1 + imm.length;
1701       }
1702       // clang-format on
1703       case kExprMemoryGrow:
1704       case kExprMemorySize: {
1705         MemoryIndexImmediate<validate> imm(decoder, pc + 1);
1706         return 1 + imm.length;
1707       }
1708 
1709       /********** Prefixed opcodes **********/
1710       case kNumericPrefix: {
1711         uint32_t length = 0;
1712         opcode = decoder->read_prefixed_opcode<validate>(pc, &length);
1713         switch (opcode) {
1714           case kExprI32SConvertSatF32:
1715           case kExprI32UConvertSatF32:
1716           case kExprI32SConvertSatF64:
1717           case kExprI32UConvertSatF64:
1718           case kExprI64SConvertSatF32:
1719           case kExprI64UConvertSatF32:
1720           case kExprI64SConvertSatF64:
1721           case kExprI64UConvertSatF64:
1722             return length;
1723           case kExprMemoryInit: {
1724             MemoryInitImmediate<validate> imm(decoder, pc + length);
1725             return length + imm.length;
1726           }
1727           case kExprDataDrop: {
1728             IndexImmediate<validate> imm(decoder, pc + length,
1729                                          "data segment index");
1730             return length + imm.length;
1731           }
1732           case kExprMemoryCopy: {
1733             MemoryCopyImmediate<validate> imm(decoder, pc + length);
1734             return length + imm.length;
1735           }
1736           case kExprMemoryFill: {
1737             MemoryIndexImmediate<validate> imm(decoder, pc + length);
1738             return length + imm.length;
1739           }
1740           case kExprTableInit: {
1741             TableInitImmediate<validate> imm(decoder, pc + length);
1742             return length + imm.length;
1743           }
1744           case kExprElemDrop: {
1745             IndexImmediate<validate> imm(decoder, pc + length,
1746                                          "element segment index");
1747             return length + imm.length;
1748           }
1749           case kExprTableCopy: {
1750             TableCopyImmediate<validate> imm(decoder, pc + length);
1751             return length + imm.length;
1752           }
1753           case kExprTableGrow:
1754           case kExprTableSize:
1755           case kExprTableFill: {
1756             IndexImmediate<validate> imm(decoder, pc + length, "table index");
1757             return length + imm.length;
1758           }
1759           default:
1760             if (validate) {
1761               decoder->DecodeError(pc, "invalid numeric opcode");
1762             }
1763             return length;
1764         }
1765       }
1766       case kSimdPrefix: {
1767         uint32_t length = 0;
1768         opcode = decoder->read_prefixed_opcode<validate>(pc, &length);
1769         switch (opcode) {
1770           // clang-format off
1771           FOREACH_SIMD_0_OPERAND_OPCODE(DECLARE_OPCODE_CASE)
1772             return length;
1773           FOREACH_SIMD_1_OPERAND_OPCODE(DECLARE_OPCODE_CASE)
1774             return length + 1;
1775           FOREACH_SIMD_MEM_OPCODE(DECLARE_OPCODE_CASE) {
1776             MemoryAccessImmediate<validate> imm(decoder, pc + length,
1777                                                 UINT32_MAX,
1778                                                 kConservativelyAssumeMemory64);
1779             return length + imm.length;
1780           }
1781           FOREACH_SIMD_MEM_1_OPERAND_OPCODE(DECLARE_OPCODE_CASE) {
1782             MemoryAccessImmediate<validate> imm(
1783                 decoder, pc + length, UINT32_MAX,
1784                 kConservativelyAssumeMemory64);
1785             // 1 more byte for lane index immediate.
1786             return length + imm.length + 1;
1787           }
1788           // clang-format on
1789           // Shuffles require a byte per lane, or 16 immediate bytes.
1790           case kExprS128Const:
1791           case kExprI8x16Shuffle:
1792             return length + kSimd128Size;
1793           default:
1794             if (validate) {
1795               decoder->DecodeError(pc, "invalid SIMD opcode");
1796             }
1797             return length;
1798         }
1799       }
1800       case kAtomicPrefix: {
1801         uint32_t length = 0;
1802         opcode = decoder->read_prefixed_opcode<validate>(pc, &length,
1803                                                          "atomic_index");
1804         switch (opcode) {
1805           FOREACH_ATOMIC_OPCODE(DECLARE_OPCODE_CASE) {
1806             MemoryAccessImmediate<validate> imm(decoder, pc + length,
1807                                                 UINT32_MAX,
1808                                                 kConservativelyAssumeMemory64);
1809             return length + imm.length;
1810           }
1811           FOREACH_ATOMIC_0_OPERAND_OPCODE(DECLARE_OPCODE_CASE) {
1812             return length + 1;
1813           }
1814           default:
1815             if (validate) {
1816               decoder->DecodeError(pc, "invalid Atomics opcode");
1817             }
1818             return length;
1819         }
1820       }
1821       case kGCPrefix: {
1822         uint32_t length = 0;
1823         opcode =
1824             decoder->read_prefixed_opcode<validate>(pc, &length, "gc_index");
1825         switch (opcode) {
1826           case kExprStructNew:
1827           case kExprStructNewWithRtt:
1828           case kExprStructNewDefault:
1829           case kExprStructNewDefaultWithRtt: {
1830             StructIndexImmediate<validate> imm(decoder, pc + length);
1831             return length + imm.length;
1832           }
1833           case kExprStructGet:
1834           case kExprStructGetS:
1835           case kExprStructGetU:
1836           case kExprStructSet: {
1837             FieldImmediate<validate> imm(decoder, pc + length);
1838             return length + imm.length;
1839           }
1840           case kExprArrayNew:
1841           case kExprArrayNewWithRtt:
1842           case kExprArrayNewDefault:
1843           case kExprArrayNewDefaultWithRtt:
1844           case kExprArrayGet:
1845           case kExprArrayGetS:
1846           case kExprArrayGetU:
1847           case kExprArraySet:
1848           case kExprArrayLen: {
1849             ArrayIndexImmediate<validate> imm(decoder, pc + length);
1850             return length + imm.length;
1851           }
1852           case kExprArrayInit:
1853           case kExprArrayInitStatic: {
1854             ArrayIndexImmediate<validate> array_imm(decoder, pc + length);
1855             IndexImmediate<validate> length_imm(
1856                 decoder, pc + length + array_imm.length, "array length");
1857             return length + array_imm.length + length_imm.length;
1858           }
1859           case kExprArrayCopy: {
1860             ArrayIndexImmediate<validate> dst_imm(decoder, pc + length);
1861             ArrayIndexImmediate<validate> src_imm(decoder,
1862                                                   pc + length + dst_imm.length);
1863             return length + dst_imm.length + src_imm.length;
1864           }
1865           case kExprArrayInitFromData:
1866           case kExprArrayInitFromDataStatic: {
1867             ArrayIndexImmediate<validate> array_imm(decoder, pc + length);
1868             IndexImmediate<validate> data_imm(
1869                 decoder, pc + length + array_imm.length, "data segment index");
1870             return length + array_imm.length + data_imm.length;
1871           }
1872           case kExprBrOnCast:
1873           case kExprBrOnCastFail:
1874           case kExprBrOnData:
1875           case kExprBrOnFunc:
1876           case kExprBrOnI31: {
1877             BranchDepthImmediate<validate> imm(decoder, pc + length);
1878             return length + imm.length;
1879           }
1880           case kExprRttCanon:
1881           case kExprRefTestStatic:
1882           case kExprRefCastStatic:
1883           case kExprBrOnCastStatic:
1884           case kExprBrOnCastStaticFail: {
1885             IndexImmediate<validate> imm(decoder, pc + length, "type index");
1886             return length + imm.length;
1887           }
1888           case kExprI31New:
1889           case kExprI31GetS:
1890           case kExprI31GetU:
1891           case kExprRefAsData:
1892           case kExprRefAsFunc:
1893           case kExprRefAsI31:
1894           case kExprRefIsData:
1895           case kExprRefIsFunc:
1896           case kExprRefIsI31:
1897           case kExprRefTest:
1898           case kExprRefCast:
1899             return length;
1900           default:
1901             // This is unreachable except for malformed modules.
1902             if (validate) {
1903               decoder->DecodeError(pc, "invalid gc opcode");
1904             }
1905             return length;
1906         }
1907       }
1908 
1909         // clang-format off
1910       /********** Asmjs opcodes **********/
1911       FOREACH_ASMJS_COMPAT_OPCODE(DECLARE_OPCODE_CASE)
1912         return 1;
1913 
1914       // Prefixed opcodes (already handled, included here for completeness of
1915       // switch)
1916       FOREACH_SIMD_OPCODE(DECLARE_OPCODE_CASE)
1917       FOREACH_NUMERIC_OPCODE(DECLARE_OPCODE_CASE, DECLARE_OPCODE_CASE)
1918       FOREACH_ATOMIC_OPCODE(DECLARE_OPCODE_CASE)
1919       FOREACH_ATOMIC_0_OPERAND_OPCODE(DECLARE_OPCODE_CASE)
1920       FOREACH_GC_OPCODE(DECLARE_OPCODE_CASE)
1921         UNREACHABLE();
1922         // clang-format on
1923 #undef DECLARE_OPCODE_CASE
1924     }
1925     // Invalid modules will reach this point.
1926     if (validate) {
1927       decoder->DecodeError(pc, "invalid opcode");
1928     }
1929     return 1;
1930   }
1931 
1932   // TODO(clemensb): This is only used by the interpreter; move there.
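  // Returns {values popped, values pushed} for the opcode at {pc}; e.g.
  // i32.add yields {2, 1} and local.set yields {1, 0}.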
  V8_EXPORT_PRIVATE std::pair<uint32_t, uint32_t> StackEffect(const byte* pc) {
1934     WasmOpcode opcode = static_cast<WasmOpcode>(*pc);
1935     // Handle "simple" opcodes with a fixed signature first.
1936     const FunctionSig* sig = WasmOpcodes::Signature(opcode);
1937     if (!sig) sig = WasmOpcodes::AsmjsSignature(opcode);
1938     if (sig) return {sig->parameter_count(), sig->return_count()};
1939 
1940 #define DECLARE_OPCODE_CASE(name, opcode, sig) case kExpr##name:
1941     // clang-format off
1942     switch (opcode) {
1943       case kExprSelect:
1944       case kExprSelectWithType:
1945         return {3, 1};
1946       case kExprTableSet:
1947       FOREACH_STORE_MEM_OPCODE(DECLARE_OPCODE_CASE)
1948         return {2, 0};
1949       FOREACH_LOAD_MEM_OPCODE(DECLARE_OPCODE_CASE)
1950       case kExprTableGet:
1951       case kExprLocalTee:
1952       case kExprMemoryGrow:
1953       case kExprRefAsNonNull:
1954       case kExprBrOnNull:
1955       case kExprRefIsNull:
1956         return {1, 1};
1957       case kExprLocalSet:
1958       case kExprGlobalSet:
1959       case kExprDrop:
1960       case kExprBrIf:
1961       case kExprBrTable:
1962       case kExprIf:
1963       case kExprBrOnNonNull:
1964         return {1, 0};
1965       case kExprLocalGet:
1966       case kExprGlobalGet:
1967       case kExprI32Const:
1968       case kExprI64Const:
1969       case kExprF32Const:
1970       case kExprF64Const:
1971       case kExprRefNull:
1972       case kExprRefFunc:
1973       case kExprMemorySize:
1974         return {0, 1};
1975       case kExprCallFunction: {
1976         CallFunctionImmediate<validate> imm(this, pc + 1);
1977         CHECK(Validate(pc + 1, imm));
1978         return {imm.sig->parameter_count(), imm.sig->return_count()};
1979       }
1980       case kExprCallIndirect: {
1981         CallIndirectImmediate<validate> imm(this, pc + 1);
1982         CHECK(Validate(pc + 1, imm));
1983         // Indirect calls pop an additional argument for the table index.
1984         return {imm.sig->parameter_count() + 1,
1985                 imm.sig->return_count()};
1986       }
1987       case kExprThrow: {
1988         TagIndexImmediate<validate> imm(this, pc + 1);
1989         CHECK(Validate(pc + 1, imm));
1990         DCHECK_EQ(0, imm.tag->sig->return_count());
1991         return {imm.tag->sig->parameter_count(), 0};
1992       }
1993       case kExprBr:
1994       case kExprBlock:
1995       case kExprLoop:
1996       case kExprEnd:
1997       case kExprElse:
1998       case kExprTry:
1999       case kExprCatch:
2000       case kExprCatchAll:
2001       case kExprDelegate:
2002       case kExprRethrow:
2003       case kExprNop:
2004       case kExprNopForTestingUnsupportedInLiftoff:
2005       case kExprReturn:
2006       case kExprReturnCall:
2007       case kExprReturnCallIndirect:
2008       case kExprUnreachable:
2009         return {0, 0};
2010       case kExprLet:
2011         // TODO(7748): Implement
2012         return {0, 0};
2013       case kNumericPrefix:
2014       case kAtomicPrefix:
2015       case kSimdPrefix: {
2016         opcode = this->read_prefixed_opcode<validate>(pc);
2017         switch (opcode) {
2018           FOREACH_SIMD_1_OPERAND_1_PARAM_OPCODE(DECLARE_OPCODE_CASE)
2019             return {1, 1};
2020           FOREACH_SIMD_1_OPERAND_2_PARAM_OPCODE(DECLARE_OPCODE_CASE)
2021           FOREACH_SIMD_MASK_OPERAND_OPCODE(DECLARE_OPCODE_CASE)
2022             return {2, 1};
2023           FOREACH_SIMD_CONST_OPCODE(DECLARE_OPCODE_CASE)
2024             return {0, 1};
2025           // Special case numeric opcodes without fixed signature.
2026           case kExprMemoryInit:
2027           case kExprMemoryCopy:
2028           case kExprMemoryFill:
2029             return {3, 0};
2030           case kExprTableGrow:
2031             return {2, 1};
2032           case kExprTableFill:
2033             return {3, 0};
2034           default: {
2035             sig = WasmOpcodes::Signature(opcode);
2036             DCHECK_NOT_NULL(sig);
2037             return {sig->parameter_count(), sig->return_count()};
2038           }
2039         }
2040       }
2041       case kGCPrefix: {
2042         opcode = this->read_prefixed_opcode<validate>(pc);
2043         switch (opcode) {
2044           case kExprStructNewDefaultWithRtt:
2045           case kExprStructGet:
2046           case kExprStructGetS:
2047           case kExprStructGetU:
2048           case kExprI31New:
2049           case kExprI31GetS:
2050           case kExprI31GetU:
2051           case kExprArrayNewDefault:
2052           case kExprArrayLen:
2053           case kExprRefTestStatic:
2054           case kExprRefCastStatic:
2055           case kExprBrOnCastStatic:
2056           case kExprBrOnCastStaticFail:
2057             return {1, 1};
2058           case kExprStructSet:
2059             return {2, 0};
2060           case kExprArrayNew:
2061           case kExprArrayNewDefaultWithRtt:
2062           case kExprArrayInitFromDataStatic:
2063           case kExprArrayGet:
2064           case kExprArrayGetS:
2065           case kExprArrayGetU:
2066           case kExprRefTest:
2067           case kExprRefCast:
2068           case kExprBrOnCast:
2069           case kExprBrOnCastFail:
2070             return {2, 1};
2071           case kExprArraySet:
2072             return {3, 0};
2073           case kExprArrayCopy:
2074             return {5, 0};
2075           case kExprRttCanon:
2076           case kExprStructNewDefault:
2077             return {0, 1};
2078           case kExprArrayNewWithRtt:
2079           case kExprArrayInitFromData:
2080             return {3, 1};
2081           case kExprStructNewWithRtt: {
2082             StructIndexImmediate<validate> imm(this, pc + 2);
2083             CHECK(Validate(pc + 2, imm));
2084             return {imm.struct_type->field_count() + 1, 1};
2085           }
2086           case kExprStructNew: {
2087             StructIndexImmediate<validate> imm(this, pc + 2);
2088             CHECK(Validate(pc + 2, imm));
2089             return {imm.struct_type->field_count(), 1};
2090           }
2091           case kExprArrayInit:
2092           case kExprArrayInitStatic: {
2093             ArrayIndexImmediate<validate> array_imm(this, pc + 2);
2094             IndexImmediate<validate> length_imm(this, pc + 2 + array_imm.length,
2095                                                 "array length");
2096             return {length_imm.index + (opcode == kExprArrayInit ? 1 : 0), 1};
2097           }
2098           default:
2099             UNREACHABLE();
2100         }
2101       }
2102       default:
2103         FATAL("unimplemented opcode: %x (%s)", opcode,
2104               WasmOpcodes::OpcodeName(opcode));
2105         return {0, 0};
2106     }
2107 #undef DECLARE_OPCODE_CASE
2108     // clang-format on
2109   }
2110 
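  // Locals of non-defaultable type (the {nn_locals} feature) must be assigned
  // before they are read. The helpers below track, per local, whether such an
  // initializing assignment has happened, and allow rolling that state back
  // when the enclosing block ends.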
  bool is_local_initialized(uint32_t local_index) {
2112     return initialized_locals_[local_index];
2113   }
2114 
  void set_local_initialized(uint32_t local_index) {
2116     if (!enabled_.has_nn_locals()) return;
2117     // This implicitly covers defaultable locals too (which are always
2118     // initialized).
2119     if (is_local_initialized(local_index)) return;
2120     initialized_locals_[local_index] = true;
2121     locals_initializers_stack_.push_back(local_index);
2122   }
2123 
  uint32_t locals_initialization_stack_depth() const {
2125     return static_cast<uint32_t>(locals_initializers_stack_.size());
2126   }
2127 
  void RollbackLocalsInitialization(uint32_t previous_stack_height) {
2129     if (!enabled_.has_nn_locals()) return;
2130     while (locals_initializers_stack_.size() > previous_stack_height) {
2131       uint32_t local_index = locals_initializers_stack_.back();
2132       locals_initializers_stack_.pop_back();
2133       initialized_locals_[local_index] = false;
2134     }
2135   }
2136 
  void InitializeInitializedLocalsTracking(int non_defaultable_locals) {
2138     initialized_locals_.assign(num_locals_, false);
2139     // Parameters count as initialized...
2140     const uint32_t num_params = static_cast<uint32_t>(sig_->parameter_count());
2141     for (uint32_t i = 0; i < num_params; i++) {
2142       initialized_locals_[i] = true;
2143     }
2144     // ...and so do defaultable locals.
2145     for (uint32_t i = num_params; i < num_locals_; i++) {
2146       if (local_types_[i].is_defaultable()) initialized_locals_[i] = true;
2147     }
2148     if (non_defaultable_locals == 0) return;
2149     locals_initializers_stack_.reserve(non_defaultable_locals);
2150   }
2151 
2152   // The {Zone} is implicitly stored in the {ZoneAllocator} which is part of
2153   // this {ZoneVector}. Hence save one field and just get it from there if
2154   // needed (see {zone()} accessor below).
2155   ZoneVector<ValueType> local_types_;
2156 
2157   // Cached value, for speed (yes, it's measurably faster to load this value
2158   // than to load the start and end pointer from a vector, subtract and shift).
2159   uint32_t num_locals_ = 0;
2160 
2161   // Indicates whether the local with the given index is currently initialized.
2162   // Entries for defaultable locals are meaningless; we have a bit for each
2163   // local because we expect that the effort required to densify this bit
2164   // vector would more than offset the memory savings.
2165   ZoneVector<bool> initialized_locals_;
2166   // Keeps track of initializing assignments to non-defaultable locals that
2167   // happened, so they can be discarded at the end of the current block.
2168   // Contains no duplicates, so the size of this stack is bounded (and pre-
2169   // allocated) to the number of non-defaultable locals in the function.
2170   ZoneVector<uint32_t> locals_initializers_stack_;
2171 
2172   const WasmModule* module_;
2173   const WasmFeatures enabled_;
2174   WasmFeatures* detected_;
2175   const FunctionSig* sig_;
2176 };
2177 
2178 // Only call this in contexts where {current_code_reachable_and_ok_} is known to
2179 // hold.
2180 #define CALL_INTERFACE(name, ...)                         \
2181   do {                                                    \
2182     DCHECK(!control_.empty());                            \
2183     DCHECK(current_code_reachable_and_ok_);               \
2184     DCHECK_EQ(current_code_reachable_and_ok_,             \
2185               this->ok() && control_.back().reachable()); \
2186     interface_.name(this, ##__VA_ARGS__);                 \
2187   } while (false)
2188 #define CALL_INTERFACE_IF_OK_AND_REACHABLE(name, ...)     \
2189   do {                                                    \
2190     DCHECK(!control_.empty());                            \
2191     DCHECK_EQ(current_code_reachable_and_ok_,             \
2192               this->ok() && control_.back().reachable()); \
2193     if (V8_LIKELY(current_code_reachable_and_ok_)) {      \
2194       interface_.name(this, ##__VA_ARGS__);               \
2195     }                                                     \
2196   } while (false)
2197 #define CALL_INTERFACE_IF_OK_AND_PARENT_REACHABLE(name, ...)    \
2198   do {                                                          \
2199     DCHECK(!control_.empty());                                  \
2200     if (VALIDATE(this->ok()) &&                                 \
2201         (control_.size() == 1 || control_at(1)->reachable())) { \
2202       interface_.name(this, ##__VA_ARGS__);                     \
2203     }                                                           \
2204   } while (false)
2205 
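// A minimal usage sketch (the zone, module, feature sets and the "MyInterface"
// implementation below are placeholders, not part of this header):
//
//   WasmFullDecoder<Decoder::kFullValidation, MyInterface> decoder(
//       &zone, module, enabled_features, &detected_features, body);
//   bool ok = decoder.Decode();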
2206 template <Decoder::ValidateFlag validate, typename Interface,
2207           DecodingMode decoding_mode = kFunctionBody>
2208 class WasmFullDecoder : public WasmDecoder<validate, decoding_mode> {
2209   using Value = typename Interface::Value;
2210   using Control = typename Interface::Control;
2211   using ArgVector = base::Vector<Value>;
2212   using ReturnVector = base::SmallVector<Value, 2>;
2213 
2214   // All Value types should be trivially copyable for performance. We push, pop,
2215   // and store them in local variables.
2216   ASSERT_TRIVIALLY_COPYABLE(Value);
2217 
2218  public:
2219   template <typename... InterfaceArgs>
  WasmFullDecoder(Zone* zone, const WasmModule* module,
                  const WasmFeatures& enabled, WasmFeatures* detected,
                  const FunctionBody& body, InterfaceArgs&&... interface_args)
2223       : WasmDecoder<validate, decoding_mode>(zone, module, enabled, detected,
2224                                              body.sig, body.start, body.end,
2225                                              body.offset),
2226         interface_(std::forward<InterfaceArgs>(interface_args)...),
2227         control_(zone) {}
2228 
  Interface& interface() { return interface_; }
2230 
  bool Decode() {
2232     DCHECK_EQ(stack_end_, stack_);
2233     DCHECK(control_.empty());
2234     DCHECK_LE(this->pc_, this->end_);
2235     DCHECK_EQ(this->num_locals(), 0);
2236 
2237     locals_offset_ = this->pc_offset();
2238     this->InitializeLocalsFromSig();
2239     uint32_t params_count = static_cast<uint32_t>(this->num_locals());
2240     uint32_t locals_length;
2241     this->DecodeLocals(this->pc(), &locals_length, params_count);
2242     if (this->failed()) return TraceFailed();
2243     this->consume_bytes(locals_length);
2244     int non_defaultable = 0;
2245     for (uint32_t index = params_count; index < this->num_locals(); index++) {
2246       if (!VALIDATE(this->enabled_.has_nn_locals() ||
2247                     this->enabled_.has_unsafe_nn_locals() ||
2248                     this->local_type(index).is_defaultable())) {
2249         this->DecodeError(
2250             "Cannot define function-level local of non-defaultable type %s",
2251             this->local_type(index).name().c_str());
2252         return this->TraceFailed();
2253       }
2254       if (!this->local_type(index).is_defaultable()) non_defaultable++;
2255     }
2256     this->InitializeInitializedLocalsTracking(non_defaultable);
2257 
2258     // Cannot use CALL_INTERFACE_* macros because control is empty.
2259     interface().StartFunction(this);
2260     DecodeFunctionBody();
2261     if (this->failed()) return TraceFailed();
2262 
2263     if (!VALIDATE(control_.empty())) {
2264       if (control_.size() > 1) {
2265         this->DecodeError(control_.back().pc(),
2266                           "unterminated control structure");
2267       } else {
2268         this->DecodeError("function body must end with \"end\" opcode");
2269       }
2270       return TraceFailed();
2271     }
2272     // Cannot use CALL_INTERFACE_* macros because control is empty.
2273     interface().FinishFunction(this);
2274     if (this->failed()) return TraceFailed();
2275 
2276     TRACE("wasm-decode ok\n\n");
2277     return true;
2278   }
2279 
  bool TraceFailed() {
2281     if (this->error_.offset()) {
2282       TRACE("wasm-error module+%-6d func+%d: %s\n\n", this->error_.offset(),
2283             this->GetBufferRelativeOffset(this->error_.offset()),
2284             this->error_.message().c_str());
2285     } else {
2286       TRACE("wasm-error: %s\n\n", this->error_.message().c_str());
2287     }
2288     return false;
2289   }
2290 
  const char* SafeOpcodeNameAt(const byte* pc) {
2292     if (!pc) return "<null>";
2293     if (pc >= this->end_) return "<end>";
2294     WasmOpcode opcode = static_cast<WasmOpcode>(*pc);
2295     if (!WasmOpcodes::IsPrefixOpcode(opcode)) {
2296       return WasmOpcodes::OpcodeName(static_cast<WasmOpcode>(opcode));
2297     }
2298     opcode = this->template read_prefixed_opcode<Decoder::kFullValidation>(pc);
2299     return WasmOpcodes::OpcodeName(opcode);
2300   }
2301 
  WasmCodePosition position() const {
2303     int offset = static_cast<int>(this->pc_ - this->start_);
2304     DCHECK_EQ(this->pc_ - this->start_, offset);  // overflows cannot happen
2305     return offset;
2306   }
2307 
  uint32_t control_depth() const {
2309     return static_cast<uint32_t>(control_.size());
2310   }
2311 
  Control* control_at(uint32_t depth) {
2313     DCHECK_GT(control_.size(), depth);
2314     return &control_.back() - depth;
2315   }
2316 
  uint32_t stack_size() const {
2318     DCHECK_GE(stack_end_, stack_);
2319     DCHECK_GE(kMaxUInt32, stack_end_ - stack_);
2320     return static_cast<uint32_t>(stack_end_ - stack_);
2321   }
2322 
  Value* stack_value(uint32_t depth) const {
2324     DCHECK_LT(0, depth);
2325     DCHECK_GE(stack_size(), depth);
2326     return stack_end_ - depth;
2327   }
2328 
  int32_t current_catch() const { return current_catch_; }
2330 
  uint32_t control_depth_of_current_catch() const {
2332     return control_depth() - 1 - current_catch();
2333   }
2334 
  void SetSucceedingCodeDynamicallyUnreachable() {
2336     Control* current = &control_.back();
2337     if (current->reachable()) {
2338       current->reachability = kSpecOnlyReachable;
2339       current_code_reachable_and_ok_ = false;
2340     }
2341   }
2342 
  uint32_t pc_relative_offset() const {
2344     return this->pc_offset() - locals_offset_;
2345   }
2346 
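  // Decodes the function body opcode by opcode, dispatching each opcode to the
  // {interface_} via the per-opcode handler table (see {GetOpcodeHandler}).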
  void DecodeFunctionBody() {
2348     TRACE("wasm-decode %p...%p (module+%u, %d bytes)\n", this->start(),
2349           this->end(), this->pc_offset(),
2350           static_cast<int>(this->end() - this->start()));
2351 
2352     // Set up initial function block.
2353     {
2354       DCHECK(control_.empty());
2355       constexpr uint32_t kLocalsCount = 0;
2356       constexpr uint32_t kStackDepth = 0;
2357       constexpr uint32_t kInitStackDepth = 0;
2358       control_.emplace_back(kControlBlock, kLocalsCount, kStackDepth,
2359                             kInitStackDepth, this->pc_, kReachable);
2360       Control* c = &control_.back();
2361       if (decoding_mode == kFunctionBody) {
2362         InitMerge(&c->start_merge, 0, [](uint32_t) -> Value { UNREACHABLE(); });
2363         InitMerge(&c->end_merge,
2364                   static_cast<uint32_t>(this->sig_->return_count()),
2365                   [&](uint32_t i) {
2366                     return Value{this->pc_, this->sig_->GetReturn(i)};
2367                   });
2368       } else {
2369         DCHECK_EQ(this->sig_->parameter_count(), 0);
2370         DCHECK_EQ(this->sig_->return_count(), 1);
2371         c->start_merge.arity = 0;
2372         c->end_merge.arity = 1;
2373         c->end_merge.vals.first = Value{this->pc_, this->sig_->GetReturn(0)};
2374       }
2375       CALL_INTERFACE_IF_OK_AND_REACHABLE(StartFunctionBody, c);
2376     }
2377 
2378     // Decode the function body.
2379     while (this->pc_ < this->end_) {
      // Most operations grow the stack by at most one element (unary and
      // binary operations, local.get, constants, ...). Thus check that there
      // is enough space for those operations centrally, and avoid any bounds
      // checks in those operations.
2384       EnsureStackSpace(1);
2385       uint8_t first_byte = *this->pc_;
2386       WasmOpcode opcode = static_cast<WasmOpcode>(first_byte);
2387       CALL_INTERFACE_IF_OK_AND_REACHABLE(NextInstruction, opcode);
2388       int len;
2389       // Allowing two of the most common decoding functions to get inlined
2390       // appears to be the sweet spot.
2391       // Handling _all_ opcodes via a giant switch-statement has been tried
2392       // and found to be slower than calling through the handler table.
2393       if (opcode == kExprLocalGet) {
2394         len = WasmFullDecoder::DecodeLocalGet(this, opcode);
2395       } else if (opcode == kExprI32Const) {
2396         len = WasmFullDecoder::DecodeI32Const(this, opcode);
2397       } else {
2398         OpcodeHandler handler = GetOpcodeHandler(first_byte);
2399         len = (*handler)(this, opcode);
2400       }
2401       this->pc_ += len;
2402     }
2403 
2404     if (!VALIDATE(this->pc_ == this->end_)) {
2405       this->DecodeError("Beyond end of code");
2406     }
2407   }
2408 
2409  private:
2410   uint32_t locals_offset_ = 0;
2411   Interface interface_;
2412 
2413   // The value stack, stored as individual pointers for maximum performance.
2414   Value* stack_ = nullptr;
2415   Value* stack_end_ = nullptr;
2416   Value* stack_capacity_end_ = nullptr;
2417   ASSERT_TRIVIALLY_COPYABLE(Value);
2418 
2419   // stack of blocks, loops, and ifs.
2420   ZoneVector<Control> control_;
2421 
2422   // Controls whether code should be generated for the current block (basically
2423   // a cache for {ok() && control_.back().reachable()}).
2424   bool current_code_reachable_and_ok_ = true;
2425 
2426   // Depth of the current try block.
2427   int32_t current_catch_ = -1;
2428 
  static Value UnreachableValue(const uint8_t* pc) {
2430     return Value{pc, kWasmBottom};
2431   }
2432 
  bool CheckSimdFeatureFlagOpcode(WasmOpcode opcode) {
2434     if (!FLAG_experimental_wasm_relaxed_simd &&
2435         WasmOpcodes::IsRelaxedSimdOpcode(opcode)) {
      this->DecodeError(
          "simd opcode not available, enable with "
          "--experimental-wasm-relaxed-simd");
2438       return false;
2439     }
2440 
2441     return true;
2442   }
2443 
  MemoryAccessImmediate<validate> MakeMemoryAccessImmediate(
      uint32_t pc_offset, uint32_t max_alignment) {
2446     return MemoryAccessImmediate<validate>(
2447         this, this->pc_ + pc_offset, max_alignment, this->module_->is_memory64);
2448   }
2449 
2450 #ifdef DEBUG
2451   class TraceLine {
2452    public:
    explicit TraceLine(WasmFullDecoder* decoder) : decoder_(decoder) {
2454       WasmOpcode opcode = static_cast<WasmOpcode>(*decoder->pc());
2455       if (!WasmOpcodes::IsPrefixOpcode(opcode)) AppendOpcode(opcode);
2456     }
2457 
    void AppendOpcode(WasmOpcode opcode) {
2459       DCHECK(!WasmOpcodes::IsPrefixOpcode(opcode));
2460       Append(TRACE_INST_FORMAT, decoder_->startrel(decoder_->pc_),
2461              WasmOpcodes::OpcodeName(opcode));
2462     }
2463 
    ~TraceLine() {
2465       if (!FLAG_trace_wasm_decoder) return;
2466       AppendStackState();
2467       PrintF("%.*s\n", len_, buffer_);
2468     }
2469 
2470     // Appends a formatted string.
2471     PRINTF_FORMAT(2, 3)
    void Append(const char* format, ...) {
2473       if (!FLAG_trace_wasm_decoder) return;
2474       va_list va_args;
2475       va_start(va_args, format);
2476       size_t remaining_len = kMaxLen - len_;
2477       base::Vector<char> remaining_msg_space(buffer_ + len_, remaining_len);
2478       int len = base::VSNPrintF(remaining_msg_space, format, va_args);
2479       va_end(va_args);
2480       len_ += len < 0 ? remaining_len : len;
2481     }
2482 
2483    private:
2484     void AppendStackState() {
2485       DCHECK(FLAG_trace_wasm_decoder);
2486       Append(" ");
2487       for (Control& c : decoder_->control_) {
2488         switch (c.kind) {
2489           case kControlIf:
2490             Append("I");
2491             break;
2492           case kControlBlock:
2493             Append("B");
2494             break;
2495           case kControlLoop:
2496             Append("L");
2497             break;
2498           case kControlTry:
2499             Append("T");
2500             break;
2501           case kControlIfElse:
2502             Append("E");
2503             break;
2504           case kControlTryCatch:
2505             Append("C");
2506             break;
2507           case kControlTryCatchAll:
2508             Append("A");
2509             break;
2510           case kControlLet:
2511             Append("D");
2512             break;
2513         }
2514         if (c.start_merge.arity) Append("%u-", c.start_merge.arity);
2515         Append("%u", c.end_merge.arity);
2516         if (!c.reachable()) Append("%c", c.unreachable() ? '*' : '#');
2517       }
2518       Append(" | ");
2519       for (size_t i = 0; i < decoder_->stack_size(); ++i) {
2520         Value& val = decoder_->stack_[i];
2521         Append(" %c", val.type.short_name());
2522       }
2523     }
2524 
2525     static constexpr int kMaxLen = 512;
2526 
2527     char buffer_[kMaxLen];
2528     int len_ = 0;
2529     WasmFullDecoder* const decoder_;
2530   };
2531 #else
2532   class TraceLine {
2533    public:
2534     explicit TraceLine(WasmFullDecoder*) {}
2535 
2536     void AppendOpcode(WasmOpcode) {}
2537 
2538     PRINTF_FORMAT(2, 3)
2539     void Append(const char* format, ...) {}
2540   };
2541 #endif
2542 
2543 #define DECODE(name)                                                     \
2544   static int Decode##name(WasmFullDecoder* decoder, WasmOpcode opcode) { \
2545     TraceLine trace_msg(decoder);                                        \
2546     return decoder->Decode##name##Impl(&trace_msg, opcode);              \
2547   }                                                                      \
2548   V8_INLINE int Decode##name##Impl(TraceLine* trace_msg, WasmOpcode opcode)
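  // For illustration only: the macro above turns e.g. "DECODE(Nop) { return 1; }"
  // roughly into
  //   static int DecodeNop(WasmFullDecoder* decoder, WasmOpcode opcode) {
  //     TraceLine trace_msg(decoder);
  //     return decoder->DecodeNopImpl(&trace_msg, opcode);
  //   }
  //   V8_INLINE int DecodeNopImpl(TraceLine* trace_msg, WasmOpcode opcode) {
  //     return 1;
  //   }
  // i.e. a static wrapper (used by the opcode handler table below) plus an
  // inlined implementation that receives the trace line.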
2549 
2550   DECODE(Nop) { return 1; }
2551 
2552   DECODE(NopForTestingUnsupportedInLiftoff) {
2553     if (!VALIDATE(FLAG_enable_testing_opcode_in_wasm)) {
2554       this->DecodeError("Invalid opcode 0x%x", opcode);
2555       return 0;
2556     }
2557     CALL_INTERFACE_IF_OK_AND_REACHABLE(NopForTestingUnsupportedInLiftoff);
2558     return 1;
2559   }
2560 
2561 #define BUILD_SIMPLE_OPCODE(op, _, sig) \
2562   DECODE(op) { return BuildSimpleOperator_##sig(kExpr##op); }
2563   FOREACH_SIMPLE_NON_CONST_OPCODE(BUILD_SIMPLE_OPCODE)
2564 #undef BUILD_SIMPLE_OPCODE
2565 
2566 #define BUILD_SIMPLE_OPCODE(op, _, sig)                     \
2567   DECODE(op) {                                              \
2568     if (decoding_mode == kInitExpression) {                 \
2569       if (!VALIDATE(this->enabled_.has_extended_const())) { \
2570         NonConstError(this, kExpr##op);                     \
2571         return 0;                                           \
2572       }                                                     \
2573     }                                                       \
2574     return BuildSimpleOperator_##sig(kExpr##op);            \
2575   }
2576   FOREACH_SIMPLE_EXTENDED_CONST_OPCODE(BUILD_SIMPLE_OPCODE)
2577 #undef BUILD_SIMPLE_OPCODE
2578 
2579   DECODE(Block) {
2580     BlockTypeImmediate<validate> imm(this->enabled_, this, this->pc_ + 1,
2581                                      this->module_);
2582     if (!this->Validate(this->pc_ + 1, imm)) return 0;
2583     ArgVector args = PeekArgs(imm.sig);
2584     Control* block = PushControl(kControlBlock, 0, args.length());
2585     SetBlockType(block, imm, args.begin());
2586     CALL_INTERFACE_IF_OK_AND_REACHABLE(Block, block);
2587     DropArgs(imm.sig);
2588     PushMergeValues(block, &block->start_merge);
2589     return 1 + imm.length;
2590   }
2591 
2592   DECODE(Rethrow) {
2593     CHECK_PROTOTYPE_OPCODE(eh);
2594     BranchDepthImmediate<validate> imm(this, this->pc_ + 1);
2595     if (!this->Validate(this->pc_ + 1, imm, control_.size())) return 0;
2596     Control* c = control_at(imm.depth);
2597     if (!VALIDATE(c->is_try_catchall() || c->is_try_catch())) {
2598       this->error("rethrow not targeting catch or catch-all");
2599       return 0;
2600     }
2601     CALL_INTERFACE_IF_OK_AND_REACHABLE(Rethrow, c);
2602     EndControl();
2603     return 1 + imm.length;
2604   }
2605 
2606   DECODE(Throw) {
2607     CHECK_PROTOTYPE_OPCODE(eh);
2608     TagIndexImmediate<validate> imm(this, this->pc_ + 1);
2609     if (!this->Validate(this->pc_ + 1, imm)) return 0;
2610     ArgVector args = PeekArgs(imm.tag->ToFunctionSig());
2611     CALL_INTERFACE_IF_OK_AND_REACHABLE(Throw, imm, base::VectorOf(args));
2612     DropArgs(imm.tag->ToFunctionSig());
2613     EndControl();
2614     return 1 + imm.length;
2615   }
2616 
2617   DECODE(Try) {
2618     CHECK_PROTOTYPE_OPCODE(eh);
2619     BlockTypeImmediate<validate> imm(this->enabled_, this, this->pc_ + 1,
2620                                      this->module_);
2621     if (!this->Validate(this->pc_ + 1, imm)) return 0;
2622     ArgVector args = PeekArgs(imm.sig);
2623     Control* try_block = PushControl(kControlTry, 0, args.length());
2624     SetBlockType(try_block, imm, args.begin());
2625     try_block->previous_catch = current_catch_;
2626     current_catch_ = static_cast<int>(control_depth() - 1);
2627     CALL_INTERFACE_IF_OK_AND_REACHABLE(Try, try_block);
2628     DropArgs(imm.sig);
2629     PushMergeValues(try_block, &try_block->start_merge);
2630     return 1 + imm.length;
2631   }
2632 
2633   DECODE(Catch) {
2634     CHECK_PROTOTYPE_OPCODE(eh);
2635     TagIndexImmediate<validate> imm(this, this->pc_ + 1);
2636     if (!this->Validate(this->pc_ + 1, imm)) return 0;
2637     DCHECK(!control_.empty());
2638     Control* c = &control_.back();
2639     if (!VALIDATE(c->is_try())) {
2640       this->DecodeError("catch does not match a try");
2641       return 0;
2642     }
2643     if (!VALIDATE(!c->is_try_catchall())) {
2644       this->DecodeError("catch after catch-all for try");
2645       return 0;
2646     }
2647     FallThrough();
2648     c->kind = kControlTryCatch;
2649     // TODO(jkummerow): Consider moving the stack manipulation after the
2650     // INTERFACE call for consistency.
2651     DCHECK_LE(stack_ + c->stack_depth, stack_end_);
2652     stack_end_ = stack_ + c->stack_depth;
2653     c->reachability = control_at(1)->innerReachability();
2654     const WasmTagSig* sig = imm.tag->sig;
2655     EnsureStackSpace(static_cast<int>(sig->parameter_count()));
2656     for (ValueType type : sig->parameters()) Push(CreateValue(type));
2657     base::Vector<Value> values(stack_ + c->stack_depth, sig->parameter_count());
2658     current_catch_ = c->previous_catch;  // Pop try scope.
2659     CALL_INTERFACE_IF_OK_AND_PARENT_REACHABLE(CatchException, imm, c, values);
2660     current_code_reachable_and_ok_ = this->ok() && c->reachable();
2661     return 1 + imm.length;
2662   }
2663 
2664   DECODE(Delegate) {
2665     CHECK_PROTOTYPE_OPCODE(eh);
2666     BranchDepthImmediate<validate> imm(this, this->pc_ + 1);
2667     // -1 because the current try block is not included in the count.
2668     if (!this->Validate(this->pc_ + 1, imm, control_depth() - 1)) return 0;
2669     Control* c = &control_.back();
2670     if (!VALIDATE(c->is_incomplete_try())) {
2671       this->DecodeError("delegate does not match a try");
2672       return 0;
2673     }
2674     // +1 because the current try block is not included in the count.
2675     uint32_t target_depth = imm.depth + 1;
2676     while (target_depth < control_depth() - 1 &&
2677            (!control_at(target_depth)->is_try() ||
2678             control_at(target_depth)->is_try_catch() ||
2679             control_at(target_depth)->is_try_catchall())) {
2680       target_depth++;
2681     }
2682     FallThrough();
2683     CALL_INTERFACE_IF_OK_AND_PARENT_REACHABLE(Delegate, target_depth, c);
2684     current_catch_ = c->previous_catch;
2685     EndControl();
2686     PopControl();
2687     return 1 + imm.length;
2688   }
2689 
2690   DECODE(CatchAll) {
2691     CHECK_PROTOTYPE_OPCODE(eh);
2692     DCHECK(!control_.empty());
2693     Control* c = &control_.back();
2694     if (!VALIDATE(c->is_try())) {
2695       this->DecodeError("catch-all does not match a try");
2696       return 0;
2697     }
2698     if (!VALIDATE(!c->is_try_catchall())) {
2699       this->error("catch-all already present for try");
2700       return 0;
2701     }
2702     FallThrough();
2703     c->kind = kControlTryCatchAll;
2704     c->reachability = control_at(1)->innerReachability();
2705     current_catch_ = c->previous_catch;  // Pop try scope.
2706     CALL_INTERFACE_IF_OK_AND_PARENT_REACHABLE(CatchAll, c);
2707     stack_end_ = stack_ + c->stack_depth;
2708     current_code_reachable_and_ok_ = this->ok() && c->reachable();
2709     return 1;
2710   }
2711 
2712   DECODE(BrOnNull) {
2713     CHECK_PROTOTYPE_OPCODE(typed_funcref);
2714     BranchDepthImmediate<validate> imm(this, this->pc_ + 1);
2715     if (!this->Validate(this->pc_ + 1, imm, control_.size())) return 0;
2716     Value ref_object = Peek(0);
2717     Control* c = control_at(imm.depth);
2718     if (!VALIDATE(TypeCheckBranch<true>(c, 1))) return 0;
2719     switch (ref_object.type.kind()) {
2720       case kBottom:
2721         // We are in a polymorphic stack. Leave the stack as it is.
2722         DCHECK(!current_code_reachable_and_ok_);
2723         break;
2724       case kRef:
2725         // For a non-nullable value, we won't take the branch, and can leave
2726         // the stack as it is.
2727         break;
2728       case kOptRef: {
2729         Value result = CreateValue(
2730             ValueType::Ref(ref_object.type.heap_type(), kNonNullable));
2731         // The result of br_on_null has the same value as the argument (but a
2732         // non-nullable type).
2733         if (V8_LIKELY(current_code_reachable_and_ok_)) {
2734           CALL_INTERFACE(BrOnNull, ref_object, imm.depth, false, &result);
2735           c->br_merge()->reached = true;
2736         }
2737         // In unreachable code, we still have to push a value of the correct
2738         // type onto the stack.
2739         Drop(ref_object);
2740         Push(result);
2741         break;
2742       }
2743       default:
2744         PopTypeError(0, ref_object, "object reference");
2745         return 0;
2746     }
2747     return 1 + imm.length;
2748   }
2749 
2750   DECODE(BrOnNonNull) {
2751     CHECK_PROTOTYPE_OPCODE(gc);
2752     BranchDepthImmediate<validate> imm(this, this->pc_ + 1);
2753     if (!this->Validate(this->pc_ + 1, imm, control_.size())) return 0;
2754     Value ref_object = Peek(0, 0, kWasmAnyRef);
2755     Drop(ref_object);
2756     // Typechecking the branch and creating the branch merges require the
2757     // non-null value on the stack, so we push it temporarily.
2758     Value result = CreateValue(ref_object.type.AsNonNull());
2759     Push(result);
2760     Control* c = control_at(imm.depth);
2761     if (!VALIDATE(TypeCheckBranch<true>(c, 0))) return 0;
2762     switch (ref_object.type.kind()) {
2763       case kBottom:
2764         // We are in unreachable code. Do nothing.
2765         DCHECK(!current_code_reachable_and_ok_);
2766         break;
2767       case kRef:
2768         // For a non-nullable value, we always take the branch.
2769         if (V8_LIKELY(current_code_reachable_and_ok_)) {
2770           CALL_INTERFACE(Forward, ref_object, stack_value(1));
2771           CALL_INTERFACE(BrOrRet, imm.depth, 0);
2772           // We know that the following code is not reachable, but according
2773           // to the spec it technically is. Set it to spec-only reachable.
2774           SetSucceedingCodeDynamicallyUnreachable();
2775           c->br_merge()->reached = true;
2776         }
2777         break;
2778       case kOptRef: {
2779         if (V8_LIKELY(current_code_reachable_and_ok_)) {
2780           CALL_INTERFACE(Forward, ref_object, stack_value(1));
2781           CALL_INTERFACE(BrOnNonNull, ref_object, imm.depth);
2782           c->br_merge()->reached = true;
2783         }
2784         break;
2785       }
2786       default:
2787         PopTypeError(0, ref_object, "object reference");
2788         return 0;
2789     }
2790     // If the branch is not taken, {ref_object} is null; drop the temporarily pushed value.
2791     Drop(result);
2792     return 1 + imm.length;
2793   }
2794 
2795   DECODE(Let) {
2796     CHECK_PROTOTYPE_OPCODE(typed_funcref);
2797     BlockTypeImmediate<validate> imm(this->enabled_, this, this->pc_ + 1,
2798                                      this->module_);
2799     if (!this->Validate(this->pc_ + 1, imm)) return 0;
2800     // Temporarily add the let-defined values to the beginning of the function
2801     // locals.
2802     uint32_t locals_length;
2803     int new_locals_count =
2804         this->DecodeLocals(this->pc() + 1 + imm.length, &locals_length, 0);
2805     if (new_locals_count < 0) {
2806       return 0;
2807     }
2808     ArgVector let_local_values =
2809         PeekArgs(base::VectorOf(this->local_types_.data(), new_locals_count));
2810     ArgVector args = PeekArgs(imm.sig, new_locals_count);
2811     Control* let_block = PushControl(kControlLet, new_locals_count,
2812                                      let_local_values.length() + args.length());
2813     SetBlockType(let_block, imm, args.begin());
2814     CALL_INTERFACE_IF_OK_AND_REACHABLE(Block, let_block);
2815     CALL_INTERFACE_IF_OK_AND_REACHABLE(AllocateLocals,
2816                                        base::VectorOf(let_local_values));
2817     Drop(new_locals_count);  // Drop {let_local_values}.
2818     DropArgs(imm.sig);       // Drop {args}.
2819     PushMergeValues(let_block, &let_block->start_merge);
2820     return 1 + imm.length + locals_length;
2821   }
2822 
2823   DECODE(Loop) {
2824     BlockTypeImmediate<validate> imm(this->enabled_, this, this->pc_ + 1,
2825                                      this->module_);
2826     if (!this->Validate(this->pc_ + 1, imm)) return 0;
2827     ArgVector args = PeekArgs(imm.sig);
2828     Control* block = PushControl(kControlLoop, 0, args.length());
2829     SetBlockType(&control_.back(), imm, args.begin());
2830     CALL_INTERFACE_IF_OK_AND_REACHABLE(Loop, block);
2831     DropArgs(imm.sig);
2832     PushMergeValues(block, &block->start_merge);
2833     return 1 + imm.length;
2834   }
2835 
2836   DECODE(If) {
2837     BlockTypeImmediate<validate> imm(this->enabled_, this, this->pc_ + 1,
2838                                      this->module_);
2839     if (!this->Validate(this->pc_ + 1, imm)) return 0;
2840     Value cond = Peek(0, 0, kWasmI32);
2841     ArgVector args = PeekArgs(imm.sig, 1);
2842     if (!VALIDATE(this->ok())) return 0;
2843     Control* if_block = PushControl(kControlIf, 0, 1 + args.length());
2844     SetBlockType(if_block, imm, args.begin());
2845     CALL_INTERFACE_IF_OK_AND_REACHABLE(If, cond, if_block);
2846     Drop(cond);
2847     DropArgs(imm.sig);  // Drop {args}.
2848     PushMergeValues(if_block, &if_block->start_merge);
2849     return 1 + imm.length;
2850   }
2851 
2852   DECODE(Else) {
2853     DCHECK(!control_.empty());
2854     Control* c = &control_.back();
2855     if (!VALIDATE(c->is_if())) {
2856       this->DecodeError("else does not match an if");
2857       return 0;
2858     }
2859     if (!VALIDATE(c->is_onearmed_if())) {
2860       this->DecodeError("else already present for if");
2861       return 0;
2862     }
2863     if (!VALIDATE(TypeCheckFallThru())) return 0;
2864     c->kind = kControlIfElse;
2865     CALL_INTERFACE_IF_OK_AND_PARENT_REACHABLE(Else, c);
2866     if (c->reachable()) c->end_merge.reached = true;
2867     PushMergeValues(c, &c->start_merge);
2868     c->reachability = control_at(1)->innerReachability();
2869     current_code_reachable_and_ok_ = this->ok() && c->reachable();
2870     return 1;
2871   }
2872 
2873   DECODE(End) {
2874     DCHECK(!control_.empty());
2875     if (decoding_mode == kFunctionBody) {
2876       Control* c = &control_.back();
2877       if (c->is_incomplete_try()) {
2878         // Catch-less try, fall through to the implicit catch-all.
2879         c->kind = kControlTryCatch;
2880         current_catch_ = c->previous_catch;  // Pop try scope.
2881       }
2882       if (c->is_try_catch()) {
2883         // Emulate catch-all + re-throw.
2884         FallThrough();
2885         c->reachability = control_at(1)->innerReachability();
2886         CALL_INTERFACE_IF_OK_AND_PARENT_REACHABLE(CatchAll, c);
2887         current_code_reachable_and_ok_ =
2888             this->ok() && control_.back().reachable();
2889         CALL_INTERFACE_IF_OK_AND_REACHABLE(Rethrow, c);
2890         EndControl();
2891         PopControl();
2892         return 1;
2893       }
2894       if (c->is_onearmed_if()) {
2895         if (!VALIDATE(TypeCheckOneArmedIf(c))) return 0;
2896       }
2897 
2898       if (c->is_let()) {
2899         CALL_INTERFACE_IF_OK_AND_REACHABLE(DeallocateLocals, c->locals_count);
2900         this->local_types_.erase(this->local_types_.begin(),
2901                                  this->local_types_.begin() + c->locals_count);
2902         this->num_locals_ -= c->locals_count;
2903       }
2904     }
2905 
2906     if (control_.size() == 1) {
2907       // We need to call this first because the interface might set
2908       // {this->end_}, making the next check pass.
2909       DoReturn<kStrictCounting, decoding_mode == kFunctionBody
2910                                     ? kFallthroughMerge
2911                                     : kInitExprMerge>();
2912       // If at the last (implicit) control, check we are at end.
2913       if (!VALIDATE(this->pc_ + 1 == this->end_)) {
2914         this->DecodeError(this->pc_ + 1, "trailing code after function end");
2915         return 0;
2916       }
2917       // The result of the block is the return value.
2918       trace_msg->Append("\n" TRACE_INST_FORMAT, startrel(this->pc_),
2919                         "(implicit) return");
2920       control_.clear();
2921       return 1;
2922     }
2923 
2924     if (!VALIDATE(TypeCheckFallThru())) return 0;
2925     PopControl();
2926     return 1;
2927   }
2928 
2929   DECODE(Select) {
2930     Value cond = Peek(0, 2, kWasmI32);
2931     Value fval = Peek(1);
2932     Value tval = Peek(2, 0, fval.type);
2933     ValueType type = tval.type == kWasmBottom ? fval.type : tval.type;
2934     if (!VALIDATE(!type.is_reference())) {
2935       this->DecodeError(
2936           "select without type is only valid for value type inputs");
2937       return 0;
2938     }
2939     Value result = CreateValue(type);
2940     CALL_INTERFACE_IF_OK_AND_REACHABLE(Select, cond, fval, tval, &result);
2941     Drop(3);
2942     Push(result);
2943     return 1;
2944   }
2945 
2946   DECODE(SelectWithType) {
2947     this->detected_->Add(kFeature_reftypes);
2948     SelectTypeImmediate<validate> imm(this->enabled_, this, this->pc_ + 1,
2949                                       this->module_);
2950     if (this->failed()) return 0;
2951     Value cond = Peek(0, 2, kWasmI32);
2952     Value fval = Peek(1, 1, imm.type);
2953     Value tval = Peek(2, 0, imm.type);
2954     Value result = CreateValue(imm.type);
2955     CALL_INTERFACE_IF_OK_AND_REACHABLE(Select, cond, fval, tval, &result);
2956     Drop(3);
2957     Push(result);
2958     return 1 + imm.length;
2959   }
2960 
2961   DECODE(Br) {
2962     BranchDepthImmediate<validate> imm(this, this->pc_ + 1);
2963     if (!this->Validate(this->pc_ + 1, imm, control_.size())) return 0;
2964     Control* c = control_at(imm.depth);
2965     if (!VALIDATE(TypeCheckBranch<false>(c, 0))) return 0;
2966     if (V8_LIKELY(current_code_reachable_and_ok_)) {
2967       CALL_INTERFACE(BrOrRet, imm.depth, 0);
2968       c->br_merge()->reached = true;
2969     }
2970     EndControl();
2971     return 1 + imm.length;
2972   }
2973 
2974   DECODE(BrIf) {
2975     BranchDepthImmediate<validate> imm(this, this->pc_ + 1);
2976     if (!this->Validate(this->pc_ + 1, imm, control_.size())) return 0;
2977     Value cond = Peek(0, 0, kWasmI32);
2978     Control* c = control_at(imm.depth);
2979     if (!VALIDATE(TypeCheckBranch<true>(c, 1))) return 0;
2980     if (V8_LIKELY(current_code_reachable_and_ok_)) {
2981       CALL_INTERFACE(BrIf, cond, imm.depth);
2982       c->br_merge()->reached = true;
2983     }
2984     Drop(cond);
2985     return 1 + imm.length;
2986   }
2987 
2988   DECODE(BrTable) {
2989     BranchTableImmediate<validate> imm(this, this->pc_ + 1);
2990     BranchTableIterator<validate> iterator(this, imm);
2991     Value key = Peek(0, 0, kWasmI32);
2992     if (this->failed()) return 0;
2993     if (!this->Validate(this->pc_ + 1, imm, control_.size())) return 0;
2994 
2995     // Cache the branch targets during the iteration, so that we can set
2996     // all branch targets as reachable after the {CALL_INTERFACE} call.
2997     std::vector<bool> br_targets(control_.size());
2998 
2999     uint32_t arity = 0;
3000 
3001     while (iterator.has_next()) {
3002       const uint32_t index = iterator.cur_index();
3003       const byte* pos = iterator.pc();
3004       const uint32_t target = iterator.next();
3005       if (!VALIDATE(target < control_depth())) {
3006         this->DecodeError(pos, "invalid branch depth: %u", target);
3007         return 0;
3008       }
3009       // Avoid redundant branch target checks.
3010       if (br_targets[target]) continue;
3011       br_targets[target] = true;
3012 
3013       if (validate) {
3014         if (index == 0) {
3015           arity = control_at(target)->br_merge()->arity;
3016         } else if (!VALIDATE(control_at(target)->br_merge()->arity == arity)) {
3017           this->DecodeError(
3018               pos, "br_table: label arity inconsistent with previous arity %u",
3019               arity);
3020           return 0;
3021         }
3022         if (!VALIDATE(TypeCheckBranch<false>(control_at(target), 1))) return 0;
3023       }
3024     }
3025 
3026     if (V8_LIKELY(current_code_reachable_and_ok_)) {
3027       CALL_INTERFACE(BrTable, imm, key);
3028 
3029       for (uint32_t i = 0; i < control_depth(); ++i) {
3030         control_at(i)->br_merge()->reached |= br_targets[i];
3031       }
3032     }
3033     Drop(key);
3034     EndControl();
3035     return 1 + iterator.length();
3036   }
3037 
3038   DECODE(Return) {
3039     return DoReturn<kNonStrictCounting, kReturnMerge>() ? 1 : 0;
3040   }
3041 
3042   DECODE(Unreachable) {
3043     CALL_INTERFACE_IF_OK_AND_REACHABLE(Trap, TrapReason::kTrapUnreachable);
3044     EndControl();
3045     return 1;
3046   }
3047 
3048   DECODE(I32Const) {
3049     ImmI32Immediate<validate> imm(this, this->pc_ + 1);
3050     Value value = CreateValue(kWasmI32);
3051     CALL_INTERFACE_IF_OK_AND_REACHABLE(I32Const, &value, imm.value);
3052     Push(value);
3053     return 1 + imm.length;
3054   }
3055 
3056   DECODE(I64Const) {
3057     ImmI64Immediate<validate> imm(this, this->pc_ + 1);
3058     Value value = CreateValue(kWasmI64);
3059     CALL_INTERFACE_IF_OK_AND_REACHABLE(I64Const, &value, imm.value);
3060     Push(value);
3061     return 1 + imm.length;
3062   }
3063 
3064   DECODE(F32Const) {
3065     ImmF32Immediate<validate> imm(this, this->pc_ + 1);
3066     Value value = CreateValue(kWasmF32);
3067     CALL_INTERFACE_IF_OK_AND_REACHABLE(F32Const, &value, imm.value);
3068     Push(value);
3069     return 1 + imm.length;
3070   }
3071 
3072   DECODE(F64Const) {
3073     ImmF64Immediate<validate> imm(this, this->pc_ + 1);
3074     Value value = CreateValue(kWasmF64);
3075     CALL_INTERFACE_IF_OK_AND_REACHABLE(F64Const, &value, imm.value);
3076     Push(value);
3077     return 1 + imm.length;
3078   }
3079 
3080   DECODE(RefNull) {
3081     this->detected_->Add(kFeature_reftypes);
3082     HeapTypeImmediate<validate> imm(this->enabled_, this, this->pc_ + 1,
3083                                     this->module_);
3084     if (!VALIDATE(this->ok())) return 0;
3085     ValueType type = ValueType::Ref(imm.type, kNullable);
3086     Value value = CreateValue(type);
3087     CALL_INTERFACE_IF_OK_AND_REACHABLE(RefNull, type, &value);
3088     Push(value);
3089     return 1 + imm.length;
3090   }
3091 
3092   DECODE(RefIsNull) {
3093     this->detected_->Add(kFeature_reftypes);
3094     Value value = Peek(0);
3095     Value result = CreateValue(kWasmI32);
3096     switch (value.type.kind()) {
3097       case kOptRef:
3098         CALL_INTERFACE_IF_OK_AND_REACHABLE(UnOp, kExprRefIsNull, value,
3099                                            &result);
3100         Drop(value);
3101         Push(result);
3102         return 1;
3103       case kBottom:
3104         // We are in unreachable code; the return value does not matter.
3105       case kRef:
3106         // For non-nullable references, the result is always false.
3107         CALL_INTERFACE_IF_OK_AND_REACHABLE(Drop);
3108         Drop(value);
3109         CALL_INTERFACE_IF_OK_AND_REACHABLE(I32Const, &result, 0);
3110         Push(result);
3111         return 1;
3112       default:
3113         if (validate) {
3114           PopTypeError(0, value, "reference type");
3115           return 0;
3116         }
3117         UNREACHABLE();
3118     }
3119   }
3120 
3121   DECODE(RefFunc) {
3122     this->detected_->Add(kFeature_reftypes);
3123     IndexImmediate<validate> imm(this, this->pc_ + 1, "function index");
3124     if (!this->ValidateFunction(this->pc_ + 1, imm)) return 0;
3125     HeapType heap_type(this->enabled_.has_typed_funcref()
3126                            ? this->module_->functions[imm.index].sig_index
3127                            : HeapType::kFunc);
3128     Value value = CreateValue(ValueType::Ref(heap_type, kNonNullable));
3129     CALL_INTERFACE_IF_OK_AND_REACHABLE(RefFunc, imm.index, &value);
3130     Push(value);
3131     return 1 + imm.length;
3132   }
3133 
3134   DECODE(RefAsNonNull) {
3135     CHECK_PROTOTYPE_OPCODE(typed_funcref);
3136     Value value = Peek(0);
3137     switch (value.type.kind()) {
3138       case kBottom:
3139         // We are in unreachable code. Forward the bottom value.
3140       case kRef:
3141         // A non-nullable value can remain as-is.
3142         return 1;
3143       case kOptRef: {
3144         Value result =
3145             CreateValue(ValueType::Ref(value.type.heap_type(), kNonNullable));
3146         CALL_INTERFACE_IF_OK_AND_REACHABLE(RefAsNonNull, value, &result);
3147         Drop(value);
3148         Push(result);
3149         return 1;
3150       }
3151       default:
3152         if (validate) {
3153           PopTypeError(0, value, "reference type");
3154         }
3155         return 0;
3156     }
3157   }
3158 
3159   V8_INLINE DECODE(LocalGet) {
3160     IndexImmediate<validate> imm(this, this->pc_ + 1, "local index");
3161     if (!this->ValidateLocal(this->pc_ + 1, imm)) return 0;
3162     if (!VALIDATE(!this->enabled_.has_nn_locals() ||
3163                   this->is_local_initialized(imm.index))) {
3164       this->DecodeError(this->pc_, "uninitialized non-defaultable local: %u",
3165                         imm.index);
3166       return 0;
3167     }
3168     Value value = CreateValue(this->local_type(imm.index));
3169     CALL_INTERFACE_IF_OK_AND_REACHABLE(LocalGet, &value, imm);
3170     Push(value);
3171     return 1 + imm.length;
3172   }
3173 
3174   DECODE(LocalSet) {
3175     IndexImmediate<validate> imm(this, this->pc_ + 1, "local index");
3176     if (!this->ValidateLocal(this->pc_ + 1, imm)) return 0;
3177     Value value = Peek(0, 0, this->local_type(imm.index));
3178     CALL_INTERFACE_IF_OK_AND_REACHABLE(LocalSet, value, imm);
3179     Drop(value);
3180     this->set_local_initialized(imm.index);
3181     return 1 + imm.length;
3182   }
3183 
3184   DECODE(LocalTee) {
3185     IndexImmediate<validate> imm(this, this->pc_ + 1, "local index");
3186     if (!this->ValidateLocal(this->pc_ + 1, imm)) return 0;
3187     ValueType local_type = this->local_type(imm.index);
3188     Value value = Peek(0, 0, local_type);
3189     Value result = CreateValue(local_type);
3190     CALL_INTERFACE_IF_OK_AND_REACHABLE(LocalTee, value, &result, imm);
3191     Drop(value);
3192     Push(result);
3193     this->set_local_initialized(imm.index);
3194     return 1 + imm.length;
3195   }
3196 
3197   DECODE(Drop) {
3198     Peek(0);
3199     CALL_INTERFACE_IF_OK_AND_REACHABLE(Drop);
3200     Drop(1);
3201     return 1;
3202   }
3203 
3204   DECODE(GlobalGet) {
3205     GlobalIndexImmediate<validate> imm(this, this->pc_ + 1);
3206     if (!this->Validate(this->pc_ + 1, imm)) return 0;
3207     Value result = CreateValue(imm.global->type);
3208     CALL_INTERFACE_IF_OK_AND_REACHABLE(GlobalGet, &result, imm);
3209     Push(result);
3210     return 1 + imm.length;
3211   }
3212 
3213   DECODE(GlobalSet) {
3214     GlobalIndexImmediate<validate> imm(this, this->pc_ + 1);
3215     if (!this->Validate(this->pc_ + 1, imm)) return 0;
3216     if (!VALIDATE(imm.global->mutability)) {
3217       this->DecodeError("immutable global #%u cannot be assigned", imm.index);
3218       return 0;
3219     }
3220     Value value = Peek(0, 0, imm.global->type);
3221     CALL_INTERFACE_IF_OK_AND_REACHABLE(GlobalSet, value, imm);
3222     Drop(value);
3223     return 1 + imm.length;
3224   }
3225 
3226   DECODE(TableGet) {
3227     this->detected_->Add(kFeature_reftypes);
3228     IndexImmediate<validate> imm(this, this->pc_ + 1, "table index");
3229     if (!this->ValidateTable(this->pc_ + 1, imm)) return 0;
3230     Value index = Peek(0, 0, kWasmI32);
3231     Value result = CreateValue(this->module_->tables[imm.index].type);
3232     CALL_INTERFACE_IF_OK_AND_REACHABLE(TableGet, index, &result, imm);
3233     Drop(index);
3234     Push(result);
3235     return 1 + imm.length;
3236   }
3237 
3238   DECODE(TableSet) {
3239     this->detected_->Add(kFeature_reftypes);
3240     IndexImmediate<validate> imm(this, this->pc_ + 1, "table index");
3241     if (!this->ValidateTable(this->pc_ + 1, imm)) return 0;
3242     Value value = Peek(0, 1, this->module_->tables[imm.index].type);
3243     Value index = Peek(1, 0, kWasmI32);
3244     CALL_INTERFACE_IF_OK_AND_REACHABLE(TableSet, index, value, imm);
3245     Drop(2);
3246     return 1 + imm.length;
3247   }
3248 
3249   DECODE(LoadMem) {
3250     // Hard-code the list of load types. The opcodes are highly unlikely to
3251     // ever change, and we have some checks here to guard against that.
3252     static_assert(sizeof(LoadType) == sizeof(uint8_t), "LoadType is compact");
3253     static constexpr uint8_t kMinOpcode = kExprI32LoadMem;
3254     static constexpr uint8_t kMaxOpcode = kExprI64LoadMem32U;
3255     static constexpr LoadType kLoadTypes[] = {
3256         LoadType::kI32Load,    LoadType::kI64Load,    LoadType::kF32Load,
3257         LoadType::kF64Load,    LoadType::kI32Load8S,  LoadType::kI32Load8U,
3258         LoadType::kI32Load16S, LoadType::kI32Load16U, LoadType::kI64Load8S,
3259         LoadType::kI64Load8U,  LoadType::kI64Load16S, LoadType::kI64Load16U,
3260         LoadType::kI64Load32S, LoadType::kI64Load32U};
3261     STATIC_ASSERT(arraysize(kLoadTypes) == kMaxOpcode - kMinOpcode + 1);
3262     DCHECK_LE(kMinOpcode, opcode);
3263     DCHECK_GE(kMaxOpcode, opcode);
3264     return DecodeLoadMem(kLoadTypes[opcode - kMinOpcode]);
3265   }
3266 
3267   DECODE(StoreMem) {
3268     // Hard-code the list of store types. The opcodes are highly unlikely to
3269     // ever change, and we have some checks here to guard against that.
3270     static_assert(sizeof(StoreType) == sizeof(uint8_t), "StoreType is compact");
3271     static constexpr uint8_t kMinOpcode = kExprI32StoreMem;
3272     static constexpr uint8_t kMaxOpcode = kExprI64StoreMem32;
3273     static constexpr StoreType kStoreTypes[] = {
3274         StoreType::kI32Store,  StoreType::kI64Store,   StoreType::kF32Store,
3275         StoreType::kF64Store,  StoreType::kI32Store8,  StoreType::kI32Store16,
3276         StoreType::kI64Store8, StoreType::kI64Store16, StoreType::kI64Store32};
3277     STATIC_ASSERT(arraysize(kStoreTypes) == kMaxOpcode - kMinOpcode + 1);
3278     DCHECK_LE(kMinOpcode, opcode);
3279     DCHECK_GE(kMaxOpcode, opcode);
3280     return DecodeStoreMem(kStoreTypes[opcode - kMinOpcode]);
3281   }
3282 
3283   DECODE(MemoryGrow) {
3284     MemoryIndexImmediate<validate> imm(this, this->pc_ + 1);
3285     if (!this->Validate(this->pc_ + 1, imm)) return 0;
3286     // This opcode will not be emitted by the asm translator.
3287     DCHECK_EQ(kWasmOrigin, this->module_->origin);
3288     ValueType mem_type = this->module_->is_memory64 ? kWasmI64 : kWasmI32;
3289     Value value = Peek(0, 0, mem_type);
3290     Value result = CreateValue(mem_type);
3291     CALL_INTERFACE_IF_OK_AND_REACHABLE(MemoryGrow, value, &result);
3292     Drop(value);
3293     Push(result);
3294     return 1 + imm.length;
3295   }
3296 
3297   DECODE(MemorySize) {
3298     MemoryIndexImmediate<validate> imm(this, this->pc_ + 1);
3299     if (!this->Validate(this->pc_ + 1, imm)) return 0;
3300     ValueType result_type = this->module_->is_memory64 ? kWasmI64 : kWasmI32;
3301     Value result = CreateValue(result_type);
3302     CALL_INTERFACE_IF_OK_AND_REACHABLE(CurrentMemoryPages, &result);
3303     Push(result);
3304     return 1 + imm.length;
3305   }
3306 
3307   DECODE(CallFunction) {
3308     CallFunctionImmediate<validate> imm(this, this->pc_ + 1);
3309     if (!this->Validate(this->pc_ + 1, imm)) return 0;
3310     ArgVector args = PeekArgs(imm.sig);
3311     ReturnVector returns = CreateReturnValues(imm.sig);
3312     CALL_INTERFACE_IF_OK_AND_REACHABLE(CallDirect, imm, args.begin(),
3313                                        returns.begin());
3314     DropArgs(imm.sig);
3315     PushReturns(returns);
3316     return 1 + imm.length;
3317   }
3318 
3319   DECODE(CallIndirect) {
3320     CallIndirectImmediate<validate> imm(this, this->pc_ + 1);
3321     if (!this->Validate(this->pc_ + 1, imm)) return 0;
3322     Value index =
3323         Peek(0, static_cast<int>(imm.sig->parameter_count()), kWasmI32);
3324     ArgVector args = PeekArgs(imm.sig, 1);
3325     ReturnVector returns = CreateReturnValues(imm.sig);
3326     CALL_INTERFACE_IF_OK_AND_REACHABLE(CallIndirect, index, imm, args.begin(),
3327                                        returns.begin());
3328     Drop(index);
3329     DropArgs(imm.sig);
3330     PushReturns(returns);
3331     return 1 + imm.length;
3332   }
3333 
3334   DECODE(ReturnCall) {
3335     CHECK_PROTOTYPE_OPCODE(return_call);
3336     CallFunctionImmediate<validate> imm(this, this->pc_ + 1);
3337     if (!this->Validate(this->pc_ + 1, imm)) return 0;
3338     if (!VALIDATE(this->CanReturnCall(imm.sig))) {
3339       this->DecodeError("%s: %s", WasmOpcodes::OpcodeName(kExprReturnCall),
3340                         "tail call type error");
3341       return 0;
3342     }
3343     ArgVector args = PeekArgs(imm.sig);
3344     CALL_INTERFACE_IF_OK_AND_REACHABLE(ReturnCall, imm, args.begin());
3345     DropArgs(imm.sig);
3346     EndControl();
3347     return 1 + imm.length;
3348   }
3349 
3350   DECODE(ReturnCallIndirect) {
3351     CHECK_PROTOTYPE_OPCODE(return_call);
3352     CallIndirectImmediate<validate> imm(this, this->pc_ + 1);
3353     if (!this->Validate(this->pc_ + 1, imm)) return 0;
3354     if (!VALIDATE(this->CanReturnCall(imm.sig))) {
3355       this->DecodeError("%s: %s",
3356                         WasmOpcodes::OpcodeName(kExprReturnCallIndirect),
3357                         "tail call return types mismatch");
3358       return 0;
3359     }
3360     Value index = Peek(0, 0, kWasmI32);
3361     ArgVector args = PeekArgs(imm.sig, 1);
3362     CALL_INTERFACE_IF_OK_AND_REACHABLE(ReturnCallIndirect, index, imm,
3363                                        args.begin());
3364     Drop(index);
3365     DropArgs(imm.sig);
3366     EndControl();
3367     return 1 + imm.length;
3368   }
3369 
3370   DECODE(CallRef) {
3371     CHECK_PROTOTYPE_OPCODE(typed_funcref);
3372     Value func_ref = Peek(0);
3373     ValueType func_type = func_ref.type;
3374     if (func_type == kWasmBottom) {
3375       // We are in unreachable code; maintain the polymorphic stack.
3376       return 1;
3377     }
3378     if (!VALIDATE(func_type.is_object_reference() && func_type.has_index() &&
3379                   this->module_->has_signature(func_type.ref_index()))) {
3380       PopTypeError(0, func_ref, "function reference");
3381       return 0;
3382     }
3383     const FunctionSig* sig = this->module_->signature(func_type.ref_index());
3384     ArgVector args = PeekArgs(sig, 1);
3385     ReturnVector returns = CreateReturnValues(sig);
3386     CALL_INTERFACE_IF_OK_AND_REACHABLE(CallRef, func_ref, sig,
3387                                        func_type.ref_index(), args.begin(),
3388                                        returns.begin());
3389     Drop(func_ref);
3390     DropArgs(sig);
3391     PushReturns(returns);
3392     return 1;
3393   }
3394 
3395   DECODE(ReturnCallRef) {
3396     CHECK_PROTOTYPE_OPCODE(typed_funcref);
3397     CHECK_PROTOTYPE_OPCODE(return_call);
3398     Value func_ref = Peek(0);
3399     ValueType func_type = func_ref.type;
3400     if (func_type == kWasmBottom) {
3401       // We are in unreachable code; maintain the polymorphic stack.
3402       return 1;
3403     }
3404     if (!VALIDATE(func_type.is_object_reference() && func_type.has_index() &&
3405                   this->module_->has_signature(func_type.ref_index()))) {
3406       PopTypeError(0, func_ref, "function reference");
3407       return 0;
3408     }
3409     const FunctionSig* sig = this->module_->signature(func_type.ref_index());
3410     ArgVector args = PeekArgs(sig, 1);
3411     CALL_INTERFACE_IF_OK_AND_REACHABLE(ReturnCallRef, func_ref, sig,
3412                                        func_type.ref_index(), args.begin());
3413     Drop(func_ref);
3414     DropArgs(sig);
3415     EndControl();
3416     return 1;
3417   }
3418 
3419   DECODE(Numeric) {
3420     uint32_t opcode_length = 0;
3421     WasmOpcode full_opcode = this->template read_prefixed_opcode<validate>(
3422         this->pc_, &opcode_length, "numeric index");
3423     if (full_opcode == kExprTableGrow || full_opcode == kExprTableSize ||
3424         full_opcode == kExprTableFill) {
3425       this->detected_->Add(kFeature_reftypes);
3426     }
3427     trace_msg->AppendOpcode(full_opcode);
3428     return DecodeNumericOpcode(full_opcode, opcode_length);
3429   }
3430 
3431   DECODE(Simd) {
3432     CHECK_PROTOTYPE_OPCODE(simd);
3433     if (!CheckHardwareSupportsSimd()) {
3434       if (FLAG_correctness_fuzzer_suppressions) {
3435         FATAL("Aborting on missing Wasm SIMD support");
3436       }
3437       this->DecodeError("Wasm SIMD unsupported");
3438       return 0;
3439     }
3440     uint32_t opcode_length = 0;
3441     WasmOpcode full_opcode = this->template read_prefixed_opcode<validate>(
3442         this->pc_, &opcode_length);
3443     if (!VALIDATE(this->ok())) return 0;
3444     trace_msg->AppendOpcode(full_opcode);
3445     if (!CheckSimdFeatureFlagOpcode(full_opcode)) {
3446       return 0;
3447     }
3448     return DecodeSimdOpcode(full_opcode, opcode_length);
3449   }
3450 
3451   DECODE(Atomic) {
3452     CHECK_PROTOTYPE_OPCODE(threads);
3453     uint32_t opcode_length = 0;
3454     WasmOpcode full_opcode = this->template read_prefixed_opcode<validate>(
3455         this->pc_, &opcode_length, "atomic index");
3456     trace_msg->AppendOpcode(full_opcode);
3457     return DecodeAtomicOpcode(full_opcode, opcode_length);
3458   }
3459 
3460   DECODE(GC) {
3461     CHECK_PROTOTYPE_OPCODE(gc);
3462     uint32_t opcode_length = 0;
3463     WasmOpcode full_opcode = this->template read_prefixed_opcode<validate>(
3464         this->pc_, &opcode_length, "gc index");
3465     trace_msg->AppendOpcode(full_opcode);
3466     return DecodeGCOpcode(full_opcode, opcode_length);
3467   }
3468 
3469 #define SIMPLE_PROTOTYPE_CASE(name, opc, sig) \
3470   DECODE(name) { return BuildSimplePrototypeOperator(opcode); }
3471   FOREACH_SIMPLE_PROTOTYPE_OPCODE(SIMPLE_PROTOTYPE_CASE)
3472 #undef SIMPLE_PROTOTYPE_CASE
3473 
3474   DECODE(UnknownOrAsmJs) {
3475     // Deal with special asmjs opcodes.
3476     if (!VALIDATE(is_asmjs_module(this->module_))) {
3477       this->DecodeError("Invalid opcode 0x%x", opcode);
3478       return 0;
3479     }
3480     const FunctionSig* sig = WasmOpcodes::AsmjsSignature(opcode);
3481     DCHECK_NOT_NULL(sig);
3482     return BuildSimpleOperator(opcode, sig);
3483   }
3484 
3485 #undef DECODE
3486 
3487   static int NonConstError(WasmFullDecoder* decoder, WasmOpcode opcode) {
3488     decoder->DecodeError("opcode %s is not allowed in init. expressions",
3489                          WasmOpcodes::OpcodeName(opcode));
3490     return 0;
3491   }
3492 
3493   using OpcodeHandler = int (*)(WasmFullDecoder*, WasmOpcode);
3494 
3495   // Ideally we would use template specialization for the different opcodes, but
3496   // GCC does not allow specializing templates in class scope
3497   // (https://gcc.gnu.org/bugzilla/show_bug.cgi?id=85282), and specializing
3498   // outside the class is not allowed for non-specialized classes.
3499   // Hence just list all implementations explicitly here, which also gives more
3500   // freedom to use the same implementation for different opcodes.
3501 #define DECODE_IMPL(opcode) DECODE_IMPL2(kExpr##opcode, opcode)
3502 #define DECODE_IMPL2(opcode, name)            \
3503   if (idx == opcode) {                        \
3504     if (decoding_mode == kInitExpression) {   \
3505       return &WasmFullDecoder::NonConstError; \
3506     } else {                                  \
3507       return &WasmFullDecoder::Decode##name;  \
3508     }                                         \
3509   }
3510 #define DECODE_IMPL_CONST(opcode) DECODE_IMPL_CONST2(kExpr##opcode, opcode)
3511 #define DECODE_IMPL_CONST2(opcode, name) \
3512   if (idx == opcode) return &WasmFullDecoder::Decode##name
3513 
3514   static constexpr OpcodeHandler GetOpcodeHandlerTableEntry(size_t idx) {
3515     DECODE_IMPL(Nop);
3516 #define BUILD_SIMPLE_OPCODE(op, _, sig) DECODE_IMPL(op);
3517     FOREACH_SIMPLE_NON_CONST_OPCODE(BUILD_SIMPLE_OPCODE)
3518 #undef BUILD_SIMPLE_OPCODE
3519 #define BUILD_SIMPLE_EXTENDED_CONST_OPCODE(op, _, sig) DECODE_IMPL_CONST(op);
3520     FOREACH_SIMPLE_EXTENDED_CONST_OPCODE(BUILD_SIMPLE_EXTENDED_CONST_OPCODE)
3521 #undef BUILD_SIMPLE_EXTENDED_CONST_OPCODE
3522     DECODE_IMPL(Block);
3523     DECODE_IMPL(Rethrow);
3524     DECODE_IMPL(Throw);
3525     DECODE_IMPL(Try);
3526     DECODE_IMPL(Catch);
3527     DECODE_IMPL(Delegate);
3528     DECODE_IMPL(CatchAll);
3529     DECODE_IMPL(BrOnNull);
3530     DECODE_IMPL(BrOnNonNull);
3531     DECODE_IMPL(Let);
3532     DECODE_IMPL(Loop);
3533     DECODE_IMPL(If);
3534     DECODE_IMPL(Else);
3535     DECODE_IMPL_CONST(End);
3536     DECODE_IMPL(Select);
3537     DECODE_IMPL(SelectWithType);
3538     DECODE_IMPL(Br);
3539     DECODE_IMPL(BrIf);
3540     DECODE_IMPL(BrTable);
3541     DECODE_IMPL(Return);
3542     DECODE_IMPL(Unreachable);
3543     DECODE_IMPL(NopForTestingUnsupportedInLiftoff);
3544     DECODE_IMPL_CONST(I32Const);
3545     DECODE_IMPL_CONST(I64Const);
3546     DECODE_IMPL_CONST(F32Const);
3547     DECODE_IMPL_CONST(F64Const);
3548     DECODE_IMPL_CONST(RefNull);
3549     DECODE_IMPL(RefIsNull);
3550     DECODE_IMPL_CONST(RefFunc);
3551     DECODE_IMPL(RefAsNonNull);
3552     DECODE_IMPL(LocalGet);
3553     DECODE_IMPL(LocalSet);
3554     DECODE_IMPL(LocalTee);
3555     DECODE_IMPL(Drop);
3556     DECODE_IMPL_CONST(GlobalGet);
3557     DECODE_IMPL(GlobalSet);
3558     DECODE_IMPL(TableGet);
3559     DECODE_IMPL(TableSet);
3560 #define DECODE_LOAD_MEM(op, ...) DECODE_IMPL2(kExpr##op, LoadMem);
3561     FOREACH_LOAD_MEM_OPCODE(DECODE_LOAD_MEM)
3562 #undef DECODE_LOAD_MEM
3563 #define DECODE_STORE_MEM(op, ...) DECODE_IMPL2(kExpr##op, StoreMem);
3564     FOREACH_STORE_MEM_OPCODE(DECODE_STORE_MEM)
3565 #undef DECODE_STORE_MEM
3566     DECODE_IMPL(MemoryGrow);
3567     DECODE_IMPL(MemorySize);
3568     DECODE_IMPL(CallFunction);
3569     DECODE_IMPL(CallIndirect);
3570     DECODE_IMPL(ReturnCall);
3571     DECODE_IMPL(ReturnCallIndirect);
3572     DECODE_IMPL(CallRef);
3573     DECODE_IMPL(ReturnCallRef);
3574     DECODE_IMPL2(kNumericPrefix, Numeric);
3575     DECODE_IMPL_CONST2(kSimdPrefix, Simd);
3576     DECODE_IMPL2(kAtomicPrefix, Atomic);
3577     DECODE_IMPL_CONST2(kGCPrefix, GC);
3578 #define SIMPLE_PROTOTYPE_CASE(name, opc, sig) DECODE_IMPL(name);
3579     FOREACH_SIMPLE_PROTOTYPE_OPCODE(SIMPLE_PROTOTYPE_CASE)
3580 #undef SIMPLE_PROTOTYPE_CASE
3581     return &WasmFullDecoder::DecodeUnknownOrAsmJs;
3582   }
3583 
3584 #undef DECODE_IMPL
3585 #undef DECODE_IMPL2
3586 
3587   OpcodeHandler GetOpcodeHandler(uint8_t opcode) {
3588     static constexpr std::array<OpcodeHandler, 256> kOpcodeHandlers =
3589         base::make_array<256>(GetOpcodeHandlerTableEntry);
3590     return kOpcodeHandlers[opcode];
3591   }
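  // Note (added): the handler table above is a function-local constexpr array;
  // base::make_array<256> evaluates GetOpcodeHandlerTableEntry for every
  // one-byte opcode at compile time, so looking up a handler at decode time is
  // a single indexed load.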
3592 
3593   void EndControl() {
3594     DCHECK(!control_.empty());
3595     Control* current = &control_.back();
3596     DCHECK_LE(stack_ + current->stack_depth, stack_end_);
3597     stack_end_ = stack_ + current->stack_depth;
3598     current->reachability = kUnreachable;
3599     current_code_reachable_and_ok_ = false;
3600   }
3601 
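  // Initializes {merge} with {arity} values obtained from {get_val}. A single
  // value is stored inline; for arity > 1 an array is allocated in the
  // decoder's zone.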
3602   template <typename func>
3603   void InitMerge(Merge<Value>* merge, uint32_t arity, func get_val) {
3604     merge->arity = arity;
3605     if (arity == 1) {
3606       merge->vals.first = get_val(0);
3607     } else if (arity > 1) {
3608       merge->vals.array = this->zone()->template NewArray<Value>(arity);
3609       for (uint32_t i = 0; i < arity; i++) {
3610         merge->vals.array[i] = get_val(i);
3611       }
3612     }
3613   }
3614 
3615   // Initializes start- and end-merges of {c} with values according to the
3616   // in- and out-types of {c} respectively.
3617   void SetBlockType(Control* c, BlockTypeImmediate<validate>& imm,
3618                     Value* args) {
3619     const byte* pc = this->pc_;
3620     InitMerge(&c->end_merge, imm.out_arity(), [pc, &imm](uint32_t i) {
3621       return Value{pc, imm.out_type(i)};
3622     });
3623     InitMerge(&c->start_merge, imm.in_arity(), [&imm, args](uint32_t i) {
3624       // The merge needs to be instantiated with Values of the correct
3625       // type, even if the actual Value is bottom/unreachable or has
3626       // a subtype of the static type.
3627       // So we copy-construct a new Value, and update its type.
3628       Value value = args[i];
3629       value.type = imm.in_type(i);
3630       return value;
3631     });
3632   }
3633 
3634   // In reachable code, check if there are at least {count} values on the stack.
3635   // In unreachable code, if there are fewer than {count} values on the stack,
3636   // insert a number of unreachable values underneath the current values equal
3637   // to the difference, and return that number.
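  // For illustration: if the enclosing block is unreachable, the stack holds
  // one value {x} above the block's base, and a caller requests 3 arguments,
  // then two unreachable values are materialized underneath {x}
  // ([..., unreachable, unreachable, x]) and 2 is returned.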
3638   V8_INLINE int EnsureStackArguments(int count) {
3639     uint32_t limit = control_.back().stack_depth;
3640     if (V8_LIKELY(stack_size() >= count + limit)) return 0;
3641     return EnsureStackArguments_Slow(count, limit);
3642   }
3643 
3644   V8_NOINLINE int EnsureStackArguments_Slow(int count, uint32_t limit) {
3645     if (!VALIDATE(control_.back().unreachable())) {
3646       NotEnoughArgumentsError(count, stack_size() - limit);
3647     }
3648     // Silently create unreachable values out of thin air underneath the
3649     // existing stack values. To do so, we have to move existing stack values
3650     // upwards in the stack, then instantiate the new Values as
3651     // {UnreachableValue}.
3652     int current_values = stack_size() - limit;
3653     int additional_values = count - current_values;
3654     DCHECK_GT(additional_values, 0);
3655     EnsureStackSpace(additional_values);
3656     stack_end_ += additional_values;
3657     Value* stack_base = stack_value(current_values + additional_values);
3658     for (int i = current_values - 1; i >= 0; i--) {
3659       stack_base[additional_values + i] = stack_base[i];
3660     }
3661     for (int i = 0; i < additional_values; i++) {
3662       stack_base[i] = UnreachableValue(this->pc_);
3663     }
3664     return additional_values;
3665   }
3666 
3667   // Peeks (without popping) the arguments required by {sig} and type-checks them.
3668   V8_INLINE ArgVector PeekArgs(const FunctionSig* sig, int depth = 0) {
3669     int count = sig ? static_cast<int>(sig->parameter_count()) : 0;
3670     if (count == 0) return {};
3671     EnsureStackArguments(depth + count);
3672     ArgVector args(stack_value(depth + count), count);
3673     for (int i = 0; i < count; i++) {
3674       ValidateArgType(args, i, sig->GetParam(i));
3675     }
3676     return args;
3677   }
3678   // Drops a number of stack elements equal to the {sig}'s parameter count (0 if
3679   // {sig} is null), or all of them if fewer are present.
3680   V8_INLINE void DropArgs(const FunctionSig* sig) {
3681     int count = sig ? static_cast<int>(sig->parameter_count()) : 0;
3682     Drop(count);
3683   }
3684 
3685   V8_INLINE ArgVector PeekArgs(const StructType* type, int depth = 0) {
3686     int count = static_cast<int>(type->field_count());
3687     if (count == 0) return {};
3688     EnsureStackArguments(depth + count);
3689     ArgVector args(stack_value(depth + count), count);
3690     for (int i = 0; i < count; i++) {
3691       ValidateArgType(args, i, type->field(i).Unpacked());
3692     }
3693     return args;
3694   }
3695   // Drops a number of stack elements equal to the struct's field count, or all
3696   // of them if fewer are present.
3697   V8_INLINE void DropArgs(const StructType* type) {
3698     Drop(static_cast<int>(type->field_count()));
3699   }
3700 
3701   V8_INLINE ArgVector PeekArgs(base::Vector<ValueType> arg_types) {
3702     int size = static_cast<int>(arg_types.size());
3703     EnsureStackArguments(size);
3704     ArgVector args(stack_value(size), arg_types.size());
3705     for (int i = 0; i < size; i++) {
3706       ValidateArgType(args, i, arg_types[i]);
3707     }
3708     return args;
3709   }
3710 
3711   ValueType GetReturnType(const FunctionSig* sig) {
3712     DCHECK_GE(1, sig->return_count());
3713     return sig->return_count() == 0 ? kWasmVoid : sig->GetReturn();
3714   }
3715 
3716   // TODO(jkummerow): Consider refactoring control stack management so
3717   // that {drop_values} is never needed. That would require decoupling
3718   // creation of the Control object from setting of its stack depth.
3719   Control* PushControl(ControlKind kind, uint32_t locals_count = 0,
3720                        uint32_t drop_values = 0) {
3721     DCHECK(!control_.empty());
3722     Reachability reachability = control_.back().innerReachability();
3723     // In unreachable code, we may run out of stack.
3724     uint32_t stack_depth =
3725         stack_size() >= drop_values ? stack_size() - drop_values : 0;
3726     stack_depth = std::max(stack_depth, control_.back().stack_depth);
3727     uint32_t init_stack_depth = this->locals_initialization_stack_depth();
3728     control_.emplace_back(kind, locals_count, stack_depth, init_stack_depth,
3729                           this->pc_, reachability);
3730     current_code_reachable_and_ok_ = this->ok() && reachability == kReachable;
3731     return &control_.back();
3732   }
3733 
3734   void PopControl() {
3735     // This cannot be the outermost control block.
3736     DCHECK_LT(1, control_.size());
3737     Control* c = &control_.back();
3738     DCHECK_LE(stack_ + c->stack_depth, stack_end_);
3739 
3740     CALL_INTERFACE_IF_OK_AND_PARENT_REACHABLE(PopControl, c);
3741 
3742     // - In non-unreachable code, a loop just leaves the values on the stack.
3743     // - In unreachable code, it is not guaranteed that we have Values of the
3744     //   correct types on the stack, so we have to make sure we do. Their values
3745     //   do not matter, so we might as well push the (uninitialized) values of
3746     //   the loop's end merge.
3747     if (!c->is_loop() || c->unreachable()) {
3748       PushMergeValues(c, &c->end_merge);
3749     }
3750     this->RollbackLocalsInitialization(c->init_stack_depth);
3751 
3752     bool parent_reached =
3753         c->reachable() || c->end_merge.reached || c->is_onearmed_if();
3754     control_.pop_back();
3755     // If the parent block was reachable before, but the popped control does not
3756     // return to here, this block becomes "spec only reachable".
3757     if (!parent_reached) SetSucceedingCodeDynamicallyUnreachable();
3758     current_code_reachable_and_ok_ = this->ok() && control_.back().reachable();
3759   }
3760 
3761   int DecodeLoadMem(LoadType type, int prefix_len = 1) {
3762     MemoryAccessImmediate<validate> imm =
3763         MakeMemoryAccessImmediate(prefix_len, type.size_log_2());
3764     if (!this->Validate(this->pc_ + prefix_len, imm)) return 0;
3765     ValueType index_type = this->module_->is_memory64 ? kWasmI64 : kWasmI32;
3766     Value index = Peek(0, 0, index_type);
3767     Value result = CreateValue(type.value_type());
3768     CALL_INTERFACE_IF_OK_AND_REACHABLE(LoadMem, type, imm, index, &result);
3769     Drop(index);
3770     Push(result);
3771     return prefix_len + imm.length;
3772   }
3773 
3774   int DecodeLoadTransformMem(LoadType type, LoadTransformationKind transform,
3775                              uint32_t opcode_length) {
3776     // Load extends always load 64 bits.
3777     uint32_t max_alignment =
3778         transform == LoadTransformationKind::kExtend ? 3 : type.size_log_2();
3779     MemoryAccessImmediate<validate> imm =
3780         MakeMemoryAccessImmediate(opcode_length, max_alignment);
3781     if (!this->Validate(this->pc_ + opcode_length, imm)) return 0;
3782     ValueType index_type = this->module_->is_memory64 ? kWasmI64 : kWasmI32;
3783     Value index = Peek(0, 0, index_type);
3784     Value result = CreateValue(kWasmS128);
3785     CALL_INTERFACE_IF_OK_AND_REACHABLE(LoadTransform, type, transform, imm,
3786                                        index, &result);
3787     Drop(index);
3788     Push(result);
3789     return opcode_length + imm.length;
3790   }
3791 
3792   int DecodeLoadLane(WasmOpcode opcode, LoadType type, uint32_t opcode_length) {
3793     MemoryAccessImmediate<validate> mem_imm =
3794         MakeMemoryAccessImmediate(opcode_length, type.size_log_2());
3795     if (!this->Validate(this->pc_ + opcode_length, mem_imm)) return 0;
3796     SimdLaneImmediate<validate> lane_imm(
3797         this, this->pc_ + opcode_length + mem_imm.length);
3798     if (!this->Validate(this->pc_ + opcode_length, opcode, lane_imm)) return 0;
3799     Value v128 = Peek(0, 1, kWasmS128);
3800     Value index = Peek(1, 0, kWasmI32);
3801 
3802     Value result = CreateValue(kWasmS128);
3803     CALL_INTERFACE_IF_OK_AND_REACHABLE(LoadLane, type, v128, index, mem_imm,
3804                                        lane_imm.lane, &result);
3805     Drop(2);
3806     Push(result);
3807     return opcode_length + mem_imm.length + lane_imm.length;
3808   }
3809 
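  // Decodes a SIMD store-lane instruction: like DecodeLoadLane, but stores a
  // single lane to memory and pushes no result.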
  int DecodeStoreLane(WasmOpcode opcode, StoreType type,
                      uint32_t opcode_length) {
    MemoryAccessImmediate<validate> mem_imm =
        MakeMemoryAccessImmediate(opcode_length, type.size_log_2());
    if (!this->Validate(this->pc_ + opcode_length, mem_imm)) return 0;
    SimdLaneImmediate<validate> lane_imm(
        this, this->pc_ + opcode_length + mem_imm.length);
    if (!this->Validate(this->pc_ + opcode_length, opcode, lane_imm)) return 0;
    Value v128 = Peek(0, 1, kWasmS128);
    Value index = Peek(1, 0, kWasmI32);

    CALL_INTERFACE_IF_OK_AND_REACHABLE(StoreLane, type, mem_imm, index, v128,
                                       lane_imm.lane);
    Drop(2);
    return opcode_length + mem_imm.length + lane_imm.length;
  }

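  // Decodes a plain memory store: validates the immediate and consumes the
  // stored value and the index (i32, or i64 for memory64).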
  int DecodeStoreMem(StoreType store, int prefix_len = 1) {
    MemoryAccessImmediate<validate> imm =
        MakeMemoryAccessImmediate(prefix_len, store.size_log_2());
    if (!this->Validate(this->pc_ + prefix_len, imm)) return 0;
    Value value = Peek(0, 1, store.value_type());
    ValueType index_type = this->module_->is_memory64 ? kWasmI64 : kWasmI32;
    Value index = Peek(1, 0, index_type);
    CALL_INTERFACE_IF_OK_AND_REACHABLE(StoreMem, store, imm, index, value);
    Drop(2);
    return prefix_len + imm.length;
  }

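  // Decodes an s128.const and pushes the 16-byte immediate as an s128 value.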
  uint32_t SimdConstOp(uint32_t opcode_length) {
    Simd128Immediate<validate> imm(this, this->pc_ + opcode_length);
    Value result = CreateValue(kWasmS128);
    CALL_INTERFACE_IF_OK_AND_REACHABLE(S128Const, imm, &result);
    Push(result);
    return opcode_length + kSimd128Size;
  }

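  // Decodes an extract_lane instruction: consumes an s128 input and pushes a
  // scalar of {type} taken from the lane given by the immediate.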
  uint32_t SimdExtractLane(WasmOpcode opcode, ValueType type,
                           uint32_t opcode_length) {
    SimdLaneImmediate<validate> imm(this, this->pc_ + opcode_length);
    if (this->Validate(this->pc_ + opcode_length, opcode, imm)) {
      Value inputs[] = {Peek(0, 0, kWasmS128)};
      Value result = CreateValue(type);
      CALL_INTERFACE_IF_OK_AND_REACHABLE(SimdLaneOp, opcode, imm,
                                         base::ArrayVector(inputs), &result);
      Drop(1);
      Push(result);
    }
    return opcode_length + imm.length;
  }

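  // Decodes a replace_lane instruction: consumes an s128 input and a scalar of
  // {type}, and pushes the s128 with the selected lane replaced.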
  uint32_t SimdReplaceLane(WasmOpcode opcode, ValueType type,
                           uint32_t opcode_length) {
    SimdLaneImmediate<validate> imm(this, this->pc_ + opcode_length);
    if (this->Validate(this->pc_ + opcode_length, opcode, imm)) {
      Value inputs[2] = {Peek(1, 0, kWasmS128), Peek(0, 1, type)};
      Value result = CreateValue(kWasmS128);
      CALL_INTERFACE_IF_OK_AND_REACHABLE(SimdLaneOp, opcode, imm,
                                         base::ArrayVector(inputs), &result);
      Drop(2);
      Push(result);
    }
    return opcode_length + imm.length;
  }

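  // Decodes an i8x16.shuffle: consumes two s128 inputs and pushes the result
  // shuffled according to the 16-byte immediate.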
  uint32_t Simd8x16ShuffleOp(uint32_t opcode_length) {
    Simd128Immediate<validate> imm(this, this->pc_ + opcode_length);
    if (this->Validate(this->pc_ + opcode_length, imm)) {
      Value input1 = Peek(0, 1, kWasmS128);
      Value input0 = Peek(1, 0, kWasmS128);
      Value result = CreateValue(kWasmS128);
      CALL_INTERFACE_IF_OK_AND_REACHABLE(Simd8x16ShuffleOp, imm, input0, input1,
                                         &result);
      Drop(2);
      Push(result);
    }
    return opcode_length + 16;
  }

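  // Dispatches a SIMD opcode to the specialized decoders above; opcodes
  // without special handling are decoded generically via their signature.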
  uint32_t DecodeSimdOpcode(WasmOpcode opcode, uint32_t opcode_length) {
    if (decoding_mode == kInitExpression) {
      // Currently, only s128.const is allowed in initializer expressions.
      if (opcode != kExprS128Const) {
        this->DecodeError("opcode %s is not allowed in init. expressions",
                          this->SafeOpcodeNameAt(this->pc()));
        return 0;
      }
      return SimdConstOp(opcode_length);
    }
    // opcode_length is the number of bytes that this SIMD-specific opcode takes
    // up in the LEB128 encoded form.
    switch (opcode) {
      case kExprF64x2ExtractLane:
        return SimdExtractLane(opcode, kWasmF64, opcode_length);
      case kExprF32x4ExtractLane:
        return SimdExtractLane(opcode, kWasmF32, opcode_length);
      case kExprI64x2ExtractLane:
        return SimdExtractLane(opcode, kWasmI64, opcode_length);
      case kExprI32x4ExtractLane:
      case kExprI16x8ExtractLaneS:
      case kExprI16x8ExtractLaneU:
      case kExprI8x16ExtractLaneS:
      case kExprI8x16ExtractLaneU:
        return SimdExtractLane(opcode, kWasmI32, opcode_length);
      case kExprF64x2ReplaceLane:
        return SimdReplaceLane(opcode, kWasmF64, opcode_length);
      case kExprF32x4ReplaceLane:
        return SimdReplaceLane(opcode, kWasmF32, opcode_length);
      case kExprI64x2ReplaceLane:
        return SimdReplaceLane(opcode, kWasmI64, opcode_length);
      case kExprI32x4ReplaceLane:
      case kExprI16x8ReplaceLane:
      case kExprI8x16ReplaceLane:
        return SimdReplaceLane(opcode, kWasmI32, opcode_length);
      case kExprI8x16Shuffle:
        return Simd8x16ShuffleOp(opcode_length);
      case kExprS128LoadMem:
        return DecodeLoadMem(LoadType::kS128Load, opcode_length);
      case kExprS128StoreMem:
        return DecodeStoreMem(StoreType::kS128Store, opcode_length);
      case kExprS128Load32Zero:
        return DecodeLoadTransformMem(LoadType::kI32Load,
                                      LoadTransformationKind::kZeroExtend,
                                      opcode_length);
      case kExprS128Load64Zero:
        return DecodeLoadTransformMem(LoadType::kI64Load,
                                      LoadTransformationKind::kZeroExtend,
                                      opcode_length);
      case kExprS128Load8Splat:
        return DecodeLoadTransformMem(LoadType::kI32Load8S,
                                      LoadTransformationKind::kSplat,
                                      opcode_length);
      case kExprS128Load16Splat:
        return DecodeLoadTransformMem(LoadType::kI32Load16S,
                                      LoadTransformationKind::kSplat,
                                      opcode_length);
      case kExprS128Load32Splat:
        return DecodeLoadTransformMem(
            LoadType::kI32Load, LoadTransformationKind::kSplat, opcode_length);
      case kExprS128Load64Splat:
        return DecodeLoadTransformMem(
            LoadType::kI64Load, LoadTransformationKind::kSplat, opcode_length);
      case kExprS128Load8x8S:
        return DecodeLoadTransformMem(LoadType::kI32Load8S,
                                      LoadTransformationKind::kExtend,
                                      opcode_length);
      case kExprS128Load8x8U:
        return DecodeLoadTransformMem(LoadType::kI32Load8U,
                                      LoadTransformationKind::kExtend,
                                      opcode_length);
      case kExprS128Load16x4S:
        return DecodeLoadTransformMem(LoadType::kI32Load16S,
                                      LoadTransformationKind::kExtend,
                                      opcode_length);
      case kExprS128Load16x4U:
        return DecodeLoadTransformMem(LoadType::kI32Load16U,
                                      LoadTransformationKind::kExtend,
                                      opcode_length);
      case kExprS128Load32x2S:
        return DecodeLoadTransformMem(LoadType::kI64Load32S,
                                      LoadTransformationKind::kExtend,
                                      opcode_length);
      case kExprS128Load32x2U:
        return DecodeLoadTransformMem(LoadType::kI64Load32U,
                                      LoadTransformationKind::kExtend,
                                      opcode_length);
      case kExprS128Load8Lane: {
        return DecodeLoadLane(opcode, LoadType::kI32Load8S, opcode_length);
      }
      case kExprS128Load16Lane: {
        return DecodeLoadLane(opcode, LoadType::kI32Load16S, opcode_length);
      }
      case kExprS128Load32Lane: {
        return DecodeLoadLane(opcode, LoadType::kI32Load, opcode_length);
      }
      case kExprS128Load64Lane: {
        return DecodeLoadLane(opcode, LoadType::kI64Load, opcode_length);
      }
      case kExprS128Store8Lane: {
        return DecodeStoreLane(opcode, StoreType::kI32Store8, opcode_length);
      }
      case kExprS128Store16Lane: {
        return DecodeStoreLane(opcode, StoreType::kI32Store16, opcode_length);
      }
      case kExprS128Store32Lane: {
        return DecodeStoreLane(opcode, StoreType::kI32Store, opcode_length);
      }
      case kExprS128Store64Lane: {
        return DecodeStoreLane(opcode, StoreType::kI64Store, opcode_length);
      }
      case kExprS128Const:
        return SimdConstOp(opcode_length);
      default: {
        const FunctionSig* sig = WasmOpcodes::Signature(opcode);
        if (!VALIDATE(sig != nullptr)) {
          this->DecodeError("invalid simd opcode");
          return 0;
        }
        ArgVector args = PeekArgs(sig);
        if (sig->return_count() == 0) {
          CALL_INTERFACE_IF_OK_AND_REACHABLE(SimdOp, opcode,
                                             base::VectorOf(args), nullptr);
          DropArgs(sig);
        } else {
          ReturnVector results = CreateReturnValues(sig);
          CALL_INTERFACE_IF_OK_AND_REACHABLE(
              SimdOp, opcode, base::VectorOf(args), results.begin());
          DropArgs(sig);
          PushReturns(results);
        }
        return opcode_length;
      }
    }
  }

  // Checks if the types are unrelated, in which case the type check will
  // always fail. Does not account for nullability.
  bool TypeCheckAlwaysFails(Value obj, Value rtt) {
    return !IsSubtypeOf(ValueType::Ref(rtt.type.ref_index(), kNonNullable),
                        obj.type, this->module_) &&
           !IsSubtypeOf(obj.type,
                        ValueType::Ref(rtt.type.ref_index(), kNullable),
                        this->module_);
  }

  // Checks if {obj} is a nominal type which is a subtype of {rtt}'s index, in
  // which case the type check will always succeed. Does not account for
  // nullability.
  bool TypeCheckAlwaysSucceeds(Value obj, Value rtt) {
    return obj.type.has_index() &&
           this->module_->has_supertype(obj.type.ref_index()) &&
           IsSubtypeOf(obj.type,
                       ValueType::Ref(rtt.type.ref_index(), kNullable),
                       this->module_);
  }

#define NON_CONST_ONLY                                                 \
  if (decoding_mode == kInitExpression) {                              \
    this->DecodeError("opcode %s is not allowed in init. expressions", \
                      this->SafeOpcodeNameAt(this->pc()));             \
    return 0;                                                          \
  }

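  // Decodes a GC-proposal opcode (struct/array/i31/rtt operations and the
  // type-check and cast instructions, including their br_on_* variants).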
  int DecodeGCOpcode(WasmOpcode opcode, uint32_t opcode_length) {
    switch (opcode) {
      case kExprStructNew:
      case kExprStructNewWithRtt: {
        StructIndexImmediate<validate> imm(this, this->pc_ + opcode_length);
        if (!this->Validate(this->pc_ + opcode_length, imm)) return 0;
        ValueType rtt_type = ValueType::Rtt(imm.index);
        Value rtt = opcode == kExprStructNew
                        ? CreateValue(rtt_type)
                        : Peek(0, imm.struct_type->field_count(), rtt_type);
        if (opcode == kExprStructNew) {
          CALL_INTERFACE_IF_OK_AND_REACHABLE(RttCanon, imm.index, &rtt);
          Push(rtt);
        }
        ArgVector args = PeekArgs(imm.struct_type, 1);
        Value value = CreateValue(ValueType::Ref(imm.index, kNonNullable));
        CALL_INTERFACE_IF_OK_AND_REACHABLE(StructNewWithRtt, imm, rtt,
                                           args.begin(), &value);
        Drop(rtt);
        DropArgs(imm.struct_type);
        Push(value);
        return opcode_length + imm.length;
      }
      case kExprStructNewDefault:
      case kExprStructNewDefaultWithRtt: {
        StructIndexImmediate<validate> imm(this, this->pc_ + opcode_length);
        if (!this->Validate(this->pc_ + opcode_length, imm)) return 0;
        if (validate) {
          for (uint32_t i = 0; i < imm.struct_type->field_count(); i++) {
            ValueType ftype = imm.struct_type->field(i);
            if (!VALIDATE(ftype.is_defaultable())) {
              this->DecodeError(
                  "%s: struct type %d has field %d of non-defaultable type %s",
                  WasmOpcodes::OpcodeName(opcode), imm.index, i,
                  ftype.name().c_str());
              return 0;
            }
          }
        }
        ValueType rtt_type = ValueType::Rtt(imm.index);
        Value rtt = opcode == kExprStructNewDefault ? CreateValue(rtt_type)
                                                    : Peek(0, 0, rtt_type);
        if (opcode == kExprStructNewDefault) {
          CALL_INTERFACE_IF_OK_AND_REACHABLE(RttCanon, imm.index, &rtt);
          Push(rtt);
        }
        Value value = CreateValue(ValueType::Ref(imm.index, kNonNullable));
        CALL_INTERFACE_IF_OK_AND_REACHABLE(StructNewDefault, imm, rtt, &value);
        Drop(rtt);
        Push(value);
        return opcode_length + imm.length;
      }
      case kExprStructGet: {
        NON_CONST_ONLY
        FieldImmediate<validate> field(this, this->pc_ + opcode_length);
        if (!this->Validate(this->pc_ + opcode_length, field)) return 0;
        ValueType field_type =
            field.struct_imm.struct_type->field(field.field_imm.index);
        if (!VALIDATE(!field_type.is_packed())) {
          this->DecodeError(
              "struct.get: Immediate field %d of type %d has packed type %s. "
              "Use struct.get_s or struct.get_u instead.",
              field.field_imm.index, field.struct_imm.index,
              field_type.name().c_str());
          return 0;
        }
        Value struct_obj =
            Peek(0, 0, ValueType::Ref(field.struct_imm.index, kNullable));
        Value value = CreateValue(field_type);
        CALL_INTERFACE_IF_OK_AND_REACHABLE(StructGet, struct_obj, field, true,
                                           &value);
        Drop(struct_obj);
        Push(value);
        return opcode_length + field.length;
      }
      case kExprStructGetU:
      case kExprStructGetS: {
        NON_CONST_ONLY
        FieldImmediate<validate> field(this, this->pc_ + opcode_length);
        if (!this->Validate(this->pc_ + opcode_length, field)) return 0;
        ValueType field_type =
            field.struct_imm.struct_type->field(field.field_imm.index);
        if (!VALIDATE(field_type.is_packed())) {
          this->DecodeError(
              "%s: Immediate field %d of type %d has non-packed type %s. Use "
              "struct.get instead.",
              WasmOpcodes::OpcodeName(opcode), field.field_imm.index,
              field.struct_imm.index, field_type.name().c_str());
          return 0;
        }
        Value struct_obj =
            Peek(0, 0, ValueType::Ref(field.struct_imm.index, kNullable));
        Value value = CreateValue(field_type.Unpacked());
        CALL_INTERFACE_IF_OK_AND_REACHABLE(StructGet, struct_obj, field,
                                           opcode == kExprStructGetS, &value);
        Drop(struct_obj);
        Push(value);
        return opcode_length + field.length;
      }
      case kExprStructSet: {
        NON_CONST_ONLY
        FieldImmediate<validate> field(this, this->pc_ + opcode_length);
        if (!this->Validate(this->pc_ + opcode_length, field)) return 0;
        const StructType* struct_type = field.struct_imm.struct_type;
        if (!VALIDATE(struct_type->mutability(field.field_imm.index))) {
          this->DecodeError("struct.set: Field %d of type %d is immutable.",
                            field.field_imm.index, field.struct_imm.index);
          return 0;
        }
        Value field_value =
            Peek(0, 1, struct_type->field(field.field_imm.index).Unpacked());
        Value struct_obj =
            Peek(1, 0, ValueType::Ref(field.struct_imm.index, kNullable));
        CALL_INTERFACE_IF_OK_AND_REACHABLE(StructSet, struct_obj, field,
                                           field_value);
        Drop(2);
        return opcode_length + field.length;
      }
      case kExprArrayNew:
      case kExprArrayNewWithRtt: {
        NON_CONST_ONLY
        ArrayIndexImmediate<validate> imm(this, this->pc_ + opcode_length);
        if (!this->Validate(this->pc_ + opcode_length, imm)) return 0;
        ValueType rtt_type = ValueType::Rtt(imm.index);
        Value rtt = opcode == kExprArrayNew ? CreateValue(rtt_type)
                                            : Peek(0, 2, rtt_type);
        if (opcode == kExprArrayNew) {
          CALL_INTERFACE_IF_OK_AND_REACHABLE(RttCanon, imm.index, &rtt);
          Push(rtt);
        }
        Value length = Peek(1, 1, kWasmI32);
        Value initial_value =
            Peek(2, 0, imm.array_type->element_type().Unpacked());
        Value value = CreateValue(ValueType::Ref(imm.index, kNonNullable));
        CALL_INTERFACE_IF_OK_AND_REACHABLE(ArrayNewWithRtt, imm, length,
                                           initial_value, rtt, &value);
        Drop(3);  // rtt, length, initial_value.
        Push(value);
        return opcode_length + imm.length;
      }
      case kExprArrayNewDefault:
      case kExprArrayNewDefaultWithRtt: {
        NON_CONST_ONLY
        ArrayIndexImmediate<validate> imm(this, this->pc_ + opcode_length);
        if (!this->Validate(this->pc_ + opcode_length, imm)) return 0;
        if (!VALIDATE(imm.array_type->element_type().is_defaultable())) {
          this->DecodeError(
              "%s: array type %d has non-defaultable element type %s",
              WasmOpcodes::OpcodeName(opcode), imm.index,
              imm.array_type->element_type().name().c_str());
          return 0;
        }
        ValueType rtt_type = ValueType::Rtt(imm.index);
        Value rtt = opcode == kExprArrayNewDefault ? CreateValue(rtt_type)
                                                   : Peek(0, 1, rtt_type);
        if (opcode == kExprArrayNewDefault) {
          CALL_INTERFACE_IF_OK_AND_REACHABLE(RttCanon, imm.index, &rtt);
          Push(rtt);
        }
        Value length = Peek(1, 0, kWasmI32);
        Value value = CreateValue(ValueType::Ref(imm.index, kNonNullable));
        CALL_INTERFACE_IF_OK_AND_REACHABLE(ArrayNewDefault, imm, length, rtt,
                                           &value);
        Drop(2);  // rtt, length
        Push(value);
        return opcode_length + imm.length;
      }
      case kExprArrayInitFromData:
      case kExprArrayInitFromDataStatic: {
        ArrayIndexImmediate<validate> array_imm(this,
                                                this->pc_ + opcode_length);
        if (!this->Validate(this->pc_ + opcode_length, array_imm)) return 0;
        ValueType element_type = array_imm.array_type->element_type();
        if (element_type.is_reference()) {
          this->DecodeError(
              "array.init_from_data can only be used with value-type arrays, "
              "found array type #%d instead",
              array_imm.index);
          return 0;
        }
#if V8_TARGET_BIG_ENDIAN
        // Byte sequences in data segments are interpreted as little endian for
        // the purposes of this instruction. This means that they will have to
        // be transformed on big endian architectures. TODO(7748): Implement.
        if (element_type.value_kind_size() > 1) {
          UNIMPLEMENTED();
        }
#endif
        const byte* data_index_pc =
            this->pc_ + opcode_length + array_imm.length;
        IndexImmediate<validate> data_segment(this, data_index_pc,
                                              "data segment");
        if (!this->ValidateDataSegment(data_index_pc, data_segment)) return 0;

        ValueType rtt_type = ValueType::Rtt(array_imm.index);
        Value rtt = opcode == kExprArrayInitFromDataStatic
                        ? CreateValue(rtt_type)
                        : Peek(0, 2, rtt_type);
        if (opcode == kExprArrayInitFromDataStatic) {
          CALL_INTERFACE_IF_OK_AND_REACHABLE(RttCanon, array_imm.index, &rtt);
          Push(rtt);
        }

        Value length = Peek(1, 1, kWasmI32);
        Value offset = Peek(2, 0, kWasmI32);

        Value array =
            CreateValue(ValueType::Ref(array_imm.index, kNonNullable));
        CALL_INTERFACE_IF_OK_AND_REACHABLE(ArrayInitFromData, array_imm,
                                           data_segment, offset, length, rtt,
                                           &array);
        Drop(3);  // rtt, length, offset
        Push(array);
        return opcode_length + array_imm.length + data_segment.length;
      }
      case kExprArrayGetS:
      case kExprArrayGetU: {
        NON_CONST_ONLY
        ArrayIndexImmediate<validate> imm(this, this->pc_ + opcode_length);
        if (!this->Validate(this->pc_ + opcode_length, imm)) return 0;
        if (!VALIDATE(imm.array_type->element_type().is_packed())) {
          this->DecodeError(
              "%s: Immediate array type %d has non-packed type %s. Use "
              "array.get instead.",
              WasmOpcodes::OpcodeName(opcode), imm.index,
              imm.array_type->element_type().name().c_str());
          return 0;
        }
        Value index = Peek(0, 1, kWasmI32);
        Value array_obj = Peek(1, 0, ValueType::Ref(imm.index, kNullable));
        Value value = CreateValue(imm.array_type->element_type().Unpacked());
        CALL_INTERFACE_IF_OK_AND_REACHABLE(ArrayGet, array_obj, imm, index,
                                           opcode == kExprArrayGetS, &value);
        Drop(2);  // index, array_obj
        Push(value);
        return opcode_length + imm.length;
      }
      case kExprArrayGet: {
        NON_CONST_ONLY
        ArrayIndexImmediate<validate> imm(this, this->pc_ + opcode_length);
        if (!this->Validate(this->pc_ + opcode_length, imm)) return 0;
        if (!VALIDATE(!imm.array_type->element_type().is_packed())) {
          this->DecodeError(
              "array.get: Immediate array type %d has packed type %s. Use "
              "array.get_s or array.get_u instead.",
              imm.index, imm.array_type->element_type().name().c_str());
          return 0;
        }
        Value index = Peek(0, 1, kWasmI32);
        Value array_obj = Peek(1, 0, ValueType::Ref(imm.index, kNullable));
        Value value = CreateValue(imm.array_type->element_type());
        CALL_INTERFACE_IF_OK_AND_REACHABLE(ArrayGet, array_obj, imm, index,
                                           true, &value);
        Drop(2);  // index, array_obj
        Push(value);
        return opcode_length + imm.length;
      }
      case kExprArraySet: {
        NON_CONST_ONLY
        ArrayIndexImmediate<validate> imm(this, this->pc_ + opcode_length);
        if (!this->Validate(this->pc_ + opcode_length, imm)) return 0;
        if (!VALIDATE(imm.array_type->mutability())) {
          this->DecodeError("array.set: immediate array type %d is immutable",
                            imm.index);
          return 0;
        }
        Value value = Peek(0, 2, imm.array_type->element_type().Unpacked());
        Value index = Peek(1, 1, kWasmI32);
        Value array_obj = Peek(2, 0, ValueType::Ref(imm.index, kNullable));
        CALL_INTERFACE_IF_OK_AND_REACHABLE(ArraySet, array_obj, imm, index,
                                           value);
        Drop(3);
        return opcode_length + imm.length;
      }
      case kExprArrayLen: {
        NON_CONST_ONLY
        // Read but ignore an immediate array type index.
        // TODO(7748): Remove this once we are ready to make breaking changes.
        ArrayIndexImmediate<validate> imm(this, this->pc_ + opcode_length);
        Value array_obj =
            Peek(0, 0, ValueType::Ref(HeapType::kArray, kNullable));
        Value value = CreateValue(kWasmI32);
        CALL_INTERFACE_IF_OK_AND_REACHABLE(ArrayLen, array_obj, &value);
        Drop(array_obj);
        Push(value);
        return opcode_length + imm.length;
      }
      case kExprArrayCopy: {
        NON_CONST_ONLY
        ArrayIndexImmediate<validate> dst_imm(this, this->pc_ + opcode_length);
        if (!this->Validate(this->pc_ + opcode_length, dst_imm)) return 0;
        if (!VALIDATE(dst_imm.array_type->mutability())) {
          this->DecodeError(
              "array.copy: immediate destination array type #%d is immutable",
              dst_imm.index);
          return 0;
        }
        ArrayIndexImmediate<validate> src_imm(
            this, this->pc_ + opcode_length + dst_imm.length);
        if (!this->Validate(this->pc_ + opcode_length + dst_imm.length,
                            src_imm)) {
          return 0;
        }
        if (!IsSubtypeOf(src_imm.array_type->element_type(),
                         dst_imm.array_type->element_type(), this->module_)) {
          this->DecodeError(
              "array.copy: source array's #%d element type is not a subtype of "
              "destination array's #%d element type",
              src_imm.index, dst_imm.index);
          return 0;
        }
        // [dst, dst_index, src, src_index, length]
        Value dst = Peek(4, 0, ValueType::Ref(dst_imm.index, kNullable));
        Value dst_index = Peek(3, 1, kWasmI32);
        Value src = Peek(2, 2, ValueType::Ref(src_imm.index, kNullable));
        Value src_index = Peek(1, 3, kWasmI32);
        Value length = Peek(0, 4, kWasmI32);
        CALL_INTERFACE_IF_OK_AND_REACHABLE(ArrayCopy, dst, dst_index, src,
                                           src_index, length);
        Drop(5);
        return opcode_length + dst_imm.length + src_imm.length;
      }
      case kExprArrayInit:
      case kExprArrayInitStatic: {
        ArrayIndexImmediate<validate> array_imm(this,
                                                this->pc_ + opcode_length);
        if (!this->Validate(this->pc_ + opcode_length, array_imm)) return 0;
        IndexImmediate<validate> length_imm(
            this, this->pc_ + opcode_length + array_imm.length,
            "array.init length");
        uint32_t elem_count = length_imm.index;
        if (!VALIDATE(elem_count <= kV8MaxWasmArrayInitLength)) {
          this->DecodeError(
              "Requested length %u for array.init too large, maximum is %zu",
              length_imm.index, kV8MaxWasmArrayInitLength);
          return 0;
        }
        Value rtt = opcode == kExprArrayInit
                        ? Peek(0, elem_count, ValueType::Rtt(array_imm.index))
                        : CreateValue(ValueType::Rtt(array_imm.index));
        if (opcode == kExprArrayInitStatic) {
          CALL_INTERFACE_IF_OK_AND_REACHABLE(RttCanon, array_imm.index, &rtt);
          Push(rtt);
        }
        ValueType element_type = array_imm.array_type->element_type();
        std::vector<ValueType> element_types(elem_count,
                                             element_type.Unpacked());
        FunctionSig element_sig(0, elem_count, element_types.data());
        ArgVector elements = PeekArgs(&element_sig, 1);
        Value result =
            CreateValue(ValueType::Ref(array_imm.index, kNonNullable));
        CALL_INTERFACE_IF_OK_AND_REACHABLE(ArrayInit, array_imm, elements, rtt,
                                           &result);
        Drop(elem_count + 1);
        Push(result);
        return opcode_length + array_imm.length + length_imm.length;
      }
      case kExprI31New: {
        NON_CONST_ONLY
        Value input = Peek(0, 0, kWasmI32);
        Value value = CreateValue(kWasmI31Ref);
        CALL_INTERFACE_IF_OK_AND_REACHABLE(I31New, input, &value);
        Drop(input);
        Push(value);
        return opcode_length;
      }
      case kExprI31GetS: {
        NON_CONST_ONLY
        Value i31 = Peek(0, 0, kWasmI31Ref);
        Value value = CreateValue(kWasmI32);
        CALL_INTERFACE_IF_OK_AND_REACHABLE(I31GetS, i31, &value);
        Drop(i31);
        Push(value);
        return opcode_length;
      }
      case kExprI31GetU: {
        NON_CONST_ONLY
        Value i31 = Peek(0, 0, kWasmI31Ref);
        Value value = CreateValue(kWasmI32);
        CALL_INTERFACE_IF_OK_AND_REACHABLE(I31GetU, i31, &value);
        Drop(i31);
        Push(value);
        return opcode_length;
      }
      case kExprRttCanon: {
        IndexImmediate<validate> imm(this, this->pc_ + opcode_length,
                                     "type index");
        if (!this->ValidateType(this->pc_ + opcode_length, imm)) return 0;
        Value value = CreateValue(ValueType::Rtt(imm.index));
        CALL_INTERFACE_IF_OK_AND_REACHABLE(RttCanon, imm.index, &value);
        Push(value);
        return opcode_length + imm.length;
      }
      case kExprRefTest:
      case kExprRefTestStatic: {
        NON_CONST_ONLY
        // "Tests whether {obj}'s runtime type is a runtime subtype of {rtt}."
        Value rtt = Peek(0);  // This is safe for the ...Static instruction.
        if (opcode == kExprRefTestStatic) {
          IndexImmediate<validate> imm(this, this->pc_ + opcode_length,
                                       "type index");
          if (!this->ValidateType(this->pc_ + opcode_length, imm)) return 0;
          opcode_length += imm.length;
          rtt = CreateValue(ValueType::Rtt(imm.index));
          CALL_INTERFACE_IF_OK_AND_REACHABLE(RttCanon, imm.index, &rtt);
          Push(rtt);
        } else {
          DCHECK_EQ(opcode, kExprRefTest);
          if (!VALIDATE(rtt.type.is_rtt() || rtt.type.is_bottom())) {
            PopTypeError(1, rtt, "rtt");
            return 0;
          }
        }
        Value obj = Peek(1);
        Value value = CreateValue(kWasmI32);
        if (!VALIDATE(IsSubtypeOf(obj.type, kWasmFuncRef, this->module_) ||
                      IsSubtypeOf(obj.type,
                                  ValueType::Ref(HeapType::kData, kNullable),
                                  this->module_) ||
                      obj.type.is_bottom())) {
          PopTypeError(0, obj, "subtype of (ref null func) or (ref null data)");
          return 0;
        }
        if (current_code_reachable_and_ok_) {
          // This logic ensures that code generation can assume that functions
          // can only be cast to function types, and data objects to data types.
          if (V8_UNLIKELY(TypeCheckAlwaysSucceeds(obj, rtt))) {
            // Drop rtt.
            CALL_INTERFACE(Drop);
            // Type checking can still fail for null.
            if (obj.type.is_nullable()) {
              // We abuse ref.as_non_null, which isn't otherwise used as a unary
              // operator, as a sentinel for the negation of ref.is_null.
              CALL_INTERFACE(UnOp, kExprRefAsNonNull, obj, &value);
            } else {
              CALL_INTERFACE(Drop);
              CALL_INTERFACE(I32Const, &value, 1);
            }
          } else if (V8_UNLIKELY(TypeCheckAlwaysFails(obj, rtt))) {
            CALL_INTERFACE(Drop);
            CALL_INTERFACE(Drop);
            CALL_INTERFACE(I32Const, &value, 0);
          } else {
            CALL_INTERFACE(RefTest, obj, rtt, &value);
          }
        }
        Drop(2);
        Push(value);
        return opcode_length;
      }
      case kExprRefCast:
      case kExprRefCastStatic: {
        NON_CONST_ONLY
        Value rtt = Peek(0);  // This is safe for the ...Static instruction.
        if (opcode == kExprRefCastStatic) {
          IndexImmediate<validate> imm(this, this->pc_ + opcode_length,
                                       "type index");
          if (!this->ValidateType(this->pc_ + opcode_length, imm)) return 0;
          opcode_length += imm.length;
          rtt = CreateValue(ValueType::Rtt(imm.index));
          CALL_INTERFACE_IF_OK_AND_REACHABLE(RttCanon, imm.index, &rtt);
          Push(rtt);
        } else {
          DCHECK_EQ(opcode, kExprRefCast);
          if (!VALIDATE(rtt.type.is_rtt() || rtt.type.is_bottom())) {
            PopTypeError(1, rtt, "rtt");
            return 0;
          }
        }
        Value obj = Peek(1);
        if (!VALIDATE(IsSubtypeOf(obj.type, kWasmFuncRef, this->module_) ||
                      IsSubtypeOf(obj.type,
                                  ValueType::Ref(HeapType::kData, kNullable),
                                  this->module_) ||
                      obj.type.is_bottom())) {
          PopTypeError(0, obj, "subtype of (ref null func) or (ref null data)");
          return 0;
        }
        // If either value is bottom, we emit the most specific type possible.
        Value value =
            CreateValue(rtt.type.is_bottom()
                            ? kWasmBottom
                            : ValueType::Ref(rtt.type.ref_index(),
                                             obj.type.is_bottom()
                                                 ? kNonNullable
                                                 : obj.type.nullability()));
        if (current_code_reachable_and_ok_) {
          // This logic ensures that code generation can assume that functions
          // can only be cast to function types, and data objects to data types.
          if (V8_UNLIKELY(TypeCheckAlwaysSucceeds(obj, rtt))) {
            // Drop the rtt from the stack, then forward the object value to the
            // result.
            CALL_INTERFACE(Drop);
            CALL_INTERFACE(Forward, obj, &value);
          } else if (V8_UNLIKELY(TypeCheckAlwaysFails(obj, rtt))) {
            // Unrelated types. The only way this will not trap is if the object
            // is null.
            if (obj.type.is_nullable()) {
              // Drop rtt from the stack, then assert that obj is null.
              CALL_INTERFACE(Drop);
              CALL_INTERFACE(AssertNull, obj, &value);
            } else {
              CALL_INTERFACE(Trap, TrapReason::kTrapIllegalCast);
              // We know that the following code is not reachable, but according
              // to the spec it technically is. Set it to spec-only reachable.
              SetSucceedingCodeDynamicallyUnreachable();
            }
          } else {
            CALL_INTERFACE(RefCast, obj, rtt, &value);
          }
        }
        Drop(2);
        Push(value);
        return opcode_length;
      }
      case kExprBrOnCast:
      case kExprBrOnCastStatic: {
        NON_CONST_ONLY
        BranchDepthImmediate<validate> branch_depth(this,
                                                    this->pc_ + opcode_length);
        if (!this->Validate(this->pc_ + opcode_length, branch_depth,
                            control_.size())) {
          return 0;
        }
        uint32_t pc_offset = opcode_length + branch_depth.length;
        Value rtt = Peek(0);  // This is safe for the ...Static instruction.
        if (opcode == kExprBrOnCastStatic) {
          IndexImmediate<validate> imm(this, this->pc_ + pc_offset,
                                       "type index");
          if (!this->ValidateType(this->pc_ + opcode_length, imm)) return 0;
          pc_offset += imm.length;
          rtt = CreateValue(ValueType::Rtt(imm.index));
          CALL_INTERFACE_IF_OK_AND_REACHABLE(RttCanon, imm.index, &rtt);
          Push(rtt);
        } else {
          DCHECK_EQ(opcode, kExprBrOnCast);
          if (!VALIDATE(rtt.type.is_rtt() || rtt.type.is_bottom())) {
            PopTypeError(1, rtt, "rtt");
            return 0;
          }
        }
        Value obj = Peek(1);
        if (!VALIDATE(IsSubtypeOf(obj.type, kWasmFuncRef, this->module_) ||
                      IsSubtypeOf(obj.type,
                                  ValueType::Ref(HeapType::kData, kNullable),
                                  this->module_) ||
                      obj.type.is_bottom())) {
          PopTypeError(0, obj, "subtype of (ref null func) or (ref null data)");
          return 0;
        }
        Control* c = control_at(branch_depth.depth);
        if (c->br_merge()->arity == 0) {
          this->DecodeError(
              "br_on_cast must target a branch of arity at least 1");
          return 0;
        }
        // Attention: contrary to most other instructions, we modify the
        // stack before calling the interface function. This makes it
        // significantly more convenient to pass around the values that
        // will be on the stack when the branch is taken.
        // TODO(jkummerow): Reconsider this choice.
        Drop(2);  // {obj} and {rtt}.
        Value result_on_branch = CreateValue(
            rtt.type.is_bottom()
                ? kWasmBottom
                : ValueType::Ref(rtt.type.ref_index(), kNonNullable));
        Push(result_on_branch);
        // The {value_on_branch} parameter we pass to the interface must
        // be pointer-identical to the object on the stack, so we can't
        // reuse {result_on_branch} which was passed-by-value to {Push}.
        Value* value_on_branch = stack_value(1);
        if (!VALIDATE(TypeCheckBranch<true>(c, 0))) return 0;
        if (V8_LIKELY(current_code_reachable_and_ok_)) {
          // This logic ensures that code generation can assume that functions
          // can only be cast to function types, and data objects to data types.
          if (V8_UNLIKELY(TypeCheckAlwaysSucceeds(obj, rtt))) {
            CALL_INTERFACE(Drop);  // rtt
            CALL_INTERFACE(Forward, obj, value_on_branch);
            // The branch will still not be taken on null.
            if (obj.type.is_nullable()) {
              CALL_INTERFACE(BrOnNonNull, obj, branch_depth.depth);
            } else {
              CALL_INTERFACE(BrOrRet, branch_depth.depth, 0);
              // We know that the following code is not reachable, but according
              // to the spec it technically is. Set it to spec-only reachable.
              SetSucceedingCodeDynamicallyUnreachable();
            }
            c->br_merge()->reached = true;
          } else if (V8_LIKELY(!TypeCheckAlwaysFails(obj, rtt))) {
            CALL_INTERFACE(BrOnCast, obj, rtt, value_on_branch,
                           branch_depth.depth);
            c->br_merge()->reached = true;
          }
          // Otherwise the types are unrelated. Do not branch.
        }

        Drop(result_on_branch);
        Push(obj);  // Restore stack state on fallthrough.
        return pc_offset;
      }
      case kExprBrOnCastFail:
      case kExprBrOnCastStaticFail: {
        NON_CONST_ONLY
        BranchDepthImmediate<validate> branch_depth(this,
                                                    this->pc_ + opcode_length);
        if (!this->Validate(this->pc_ + opcode_length, branch_depth,
                            control_.size())) {
          return 0;
        }
        uint32_t pc_offset = opcode_length + branch_depth.length;
        Value rtt = Peek(0);  // This is safe for the ...Static instruction.
        if (opcode == kExprBrOnCastStaticFail) {
          IndexImmediate<validate> imm(this, this->pc_ + pc_offset,
                                       "type index");
          if (!this->ValidateType(this->pc_ + opcode_length, imm)) return 0;
          pc_offset += imm.length;
          rtt = CreateValue(ValueType::Rtt(imm.index));
          CALL_INTERFACE_IF_OK_AND_REACHABLE(RttCanon, imm.index, &rtt);
          Push(rtt);
        } else {
          DCHECK_EQ(opcode, kExprBrOnCastFail);
          if (!VALIDATE(rtt.type.is_rtt() || rtt.type.is_bottom())) {
            PopTypeError(1, rtt, "rtt");
            return 0;
          }
        }
        Value obj = Peek(1);
        if (!VALIDATE(IsSubtypeOf(obj.type, kWasmFuncRef, this->module_) ||
                      IsSubtypeOf(obj.type,
                                  ValueType::Ref(HeapType::kData, kNullable),
                                  this->module_) ||
                      obj.type.is_bottom())) {
          PopTypeError(0, obj, "subtype of (ref null func) or (ref null data)");
          return 0;
        }
        Control* c = control_at(branch_depth.depth);
        if (c->br_merge()->arity == 0) {
          this->DecodeError(
              "br_on_cast_fail must target a branch of arity at least 1");
          return 0;
        }
        // Attention: contrary to most other instructions, we modify the stack
        // before calling the interface function. This makes it significantly
        // more convenient to pass around the values that will be on the stack
        // when the branch is taken. In this case, we leave {obj} on the stack
        // to type check the branch.
        // TODO(jkummerow): Reconsider this choice.
        Drop(rtt);
        if (!VALIDATE(TypeCheckBranch<true>(c, 0))) return 0;
        Value result_on_fallthrough = CreateValue(
            rtt.type.is_bottom()
                ? kWasmBottom
                : ValueType::Ref(rtt.type.ref_index(), kNonNullable));
        if (V8_LIKELY(current_code_reachable_and_ok_)) {
          // This logic ensures that code generation can assume that functions
          // can only be cast to function types, and data objects to data types.
          if (V8_UNLIKELY(TypeCheckAlwaysFails(obj, rtt))) {
            // Drop {rtt} in the interface.
            CALL_INTERFACE(Drop);
            // The types are unrelated, so the cast always fails: always branch.
            CALL_INTERFACE(BrOrRet, branch_depth.depth, 0);
            // We know that the following code is not reachable, but according
            // to the spec it technically is. Set it to spec-only reachable.
            SetSucceedingCodeDynamicallyUnreachable();
            c->br_merge()->reached = true;
          } else if (V8_UNLIKELY(TypeCheckAlwaysSucceeds(obj, rtt))) {
            // Drop {rtt} in the interface.
            CALL_INTERFACE(Drop);
            // The branch can still be taken on null.
            if (obj.type.is_nullable()) {
              CALL_INTERFACE(BrOnNull, obj, branch_depth.depth, true,
                             &result_on_fallthrough);
              c->br_merge()->reached = true;
            } else {
              // Drop {obj} in the interface.
              CALL_INTERFACE(Drop);
            }
          } else {
            CALL_INTERFACE(BrOnCastFail, obj, rtt, &result_on_fallthrough,
                           branch_depth.depth);
            c->br_merge()->reached = true;
          }
          // Otherwise, the type check always succeeds. Do not branch.
        }
        // Make sure the correct value is on the stack on fallthrough.
        Drop(obj);
        Push(result_on_fallthrough);
        return pc_offset;
      }
#define ABSTRACT_TYPE_CHECK(h_type)                                            \
  case kExprRefIs##h_type: {                                                   \
    NON_CONST_ONLY                                                             \
    Value arg = Peek(0, 0, kWasmAnyRef);                                       \
    if (this->failed()) return 0;                                              \
    Value result = CreateValue(kWasmI32);                                      \
    if (V8_LIKELY(current_code_reachable_and_ok_)) {                           \
      if (IsHeapSubtypeOf(arg.type.heap_representation(), HeapType::k##h_type, \
                          this->module_)) {                                    \
        if (arg.type.is_nullable()) {                                          \
          /* We abuse ref.as_non_null, which isn't otherwise used as a unary   \
           * operator, as a sentinel for the negation of ref.is_null. */       \
          CALL_INTERFACE(UnOp, kExprRefAsNonNull, arg, &result);               \
        } else {                                                               \
          CALL_INTERFACE(Drop);                                                \
          CALL_INTERFACE(I32Const, &result, 1);                                \
        }                                                                      \
      } else if (!IsHeapSubtypeOf(HeapType::k##h_type,                         \
                                  arg.type.heap_representation(),              \
                                  this->module_)) {                            \
        CALL_INTERFACE(Drop);                                                  \
        CALL_INTERFACE(I32Const, &result, 0);                                  \
      } else {                                                                 \
        CALL_INTERFACE(RefIs##h_type, arg, &result);                           \
      }                                                                        \
    }                                                                          \
    Drop(arg);                                                                 \
    Push(result);                                                              \
    return opcode_length;                                                      \
  }
        ABSTRACT_TYPE_CHECK(Data)
        ABSTRACT_TYPE_CHECK(Func)
        ABSTRACT_TYPE_CHECK(I31)
        ABSTRACT_TYPE_CHECK(Array)
#undef ABSTRACT_TYPE_CHECK

#define ABSTRACT_TYPE_CAST(h_type)                                             \
  case kExprRefAs##h_type: {                                                   \
    NON_CONST_ONLY                                                             \
    Value arg = Peek(0, 0, kWasmAnyRef);                                       \
    ValueType non_nullable_abstract_type =                                     \
        ValueType::Ref(HeapType::k##h_type, kNonNullable);                     \
    Value result = CreateValue(non_nullable_abstract_type);                    \
    if (V8_LIKELY(current_code_reachable_and_ok_)) {                           \
      if (IsHeapSubtypeOf(arg.type.heap_representation(), HeapType::k##h_type, \
                          this->module_)) {                                    \
        if (arg.type.is_nullable()) {                                          \
          CALL_INTERFACE(RefAsNonNull, arg, &result);                          \
        } else {                                                               \
          CALL_INTERFACE(Forward, arg, &result);                               \
        }                                                                      \
      } else if (!IsHeapSubtypeOf(HeapType::k##h_type,                         \
                                  arg.type.heap_representation(),              \
                                  this->module_)) {                            \
        CALL_INTERFACE(Trap, TrapReason::kTrapIllegalCast);                    \
        /* We know that the following code is not reachable, but according */  \
        /* to the spec it technically is. Set it to spec-only reachable. */    \
        SetSucceedingCodeDynamicallyUnreachable();                             \
      } else {                                                                 \
        CALL_INTERFACE(RefAs##h_type, arg, &result);                           \
      }                                                                        \
    }                                                                          \
    Drop(arg);                                                                 \
    Push(result);                                                              \
    return opcode_length;                                                      \
  }
        ABSTRACT_TYPE_CAST(Data)
        ABSTRACT_TYPE_CAST(Func)
        ABSTRACT_TYPE_CAST(I31)
        ABSTRACT_TYPE_CAST(Array)
#undef ABSTRACT_TYPE_CAST

      case kExprBrOnData:
      case kExprBrOnFunc:
      case kExprBrOnArray:
      case kExprBrOnI31: {
        NON_CONST_ONLY
        BranchDepthImmediate<validate> branch_depth(this,
                                                    this->pc_ + opcode_length);
        if (!this->Validate(this->pc_ + opcode_length, branch_depth,
                            control_.size())) {
          return 0;
        }

        Control* c = control_at(branch_depth.depth);
        if (c->br_merge()->arity == 0) {
          this->DecodeError("%s must target a branch of arity at least 1",
                            SafeOpcodeNameAt(this->pc_));
          return 0;
        }

        // Attention: contrary to most other instructions, we modify the
        // stack before calling the interface function. This makes it
        // significantly more convenient to pass around the values that
        // will be on the stack when the branch is taken.
        // TODO(jkummerow): Reconsider this choice.
        Value obj = Peek(0, 0, kWasmAnyRef);
        Drop(obj);
        HeapType::Representation heap_type =
            opcode == kExprBrOnFunc
                ? HeapType::kFunc
                : opcode == kExprBrOnData
                      ? HeapType::kData
                      : opcode == kExprBrOnArray ? HeapType::kArray
                                                 : HeapType::kI31;
        Value result_on_branch =
            CreateValue(ValueType::Ref(heap_type, kNonNullable));
        Push(result_on_branch);
        if (!VALIDATE(TypeCheckBranch<true>(c, 0))) return 0;
        // The {value_on_branch} parameter we pass to the interface must be
        // pointer-identical to the object on the stack, so we can't reuse
        // {result_on_branch} which was passed-by-value to {Push}.
        Value* value_on_branch = stack_value(1);
        if (V8_LIKELY(current_code_reachable_and_ok_)) {
          if (opcode == kExprBrOnFunc) {
            CALL_INTERFACE(BrOnFunc, obj, value_on_branch, branch_depth.depth);
          } else if (opcode == kExprBrOnData) {
            CALL_INTERFACE(BrOnData, obj, value_on_branch, branch_depth.depth);
          } else if (opcode == kExprBrOnArray) {
            CALL_INTERFACE(BrOnArray, obj, value_on_branch, branch_depth.depth);
          } else {
            CALL_INTERFACE(BrOnI31, obj, value_on_branch, branch_depth.depth);
          }
          c->br_merge()->reached = true;
        }
        Drop(result_on_branch);
        Push(obj);  // Restore stack state on fallthrough.
        return opcode_length + branch_depth.length;
      }
      case kExprBrOnNonData:
      case kExprBrOnNonFunc:
      case kExprBrOnNonArray:
      case kExprBrOnNonI31: {
        NON_CONST_ONLY
        BranchDepthImmediate<validate> branch_depth(this,
                                                    this->pc_ + opcode_length);
        if (!this->Validate(this->pc_ + opcode_length, branch_depth,
                            control_.size())) {
          return 0;
        }

        Control* c = control_at(branch_depth.depth);
        if (c->br_merge()->arity == 0) {
          this->DecodeError("%s must target a branch of arity at least 1",
                            SafeOpcodeNameAt(this->pc_));
          return 0;
        }
        if (!VALIDATE(TypeCheckBranch<true>(c, 0))) return 0;

        Value obj = Peek(0, 0, kWasmAnyRef);
        HeapType::Representation heap_type =
            opcode == kExprBrOnNonFunc
                ? HeapType::kFunc
                : opcode == kExprBrOnNonData
                      ? HeapType::kData
                      : opcode == kExprBrOnNonArray ? HeapType::kArray
                                                    : HeapType::kI31;
        Value value_on_fallthrough =
            CreateValue(ValueType::Ref(heap_type, kNonNullable));

        if (V8_LIKELY(current_code_reachable_and_ok_)) {
          if (opcode == kExprBrOnNonFunc) {
            CALL_INTERFACE(BrOnNonFunc, obj, &value_on_fallthrough,
                           branch_depth.depth);
          } else if (opcode == kExprBrOnNonData) {
            CALL_INTERFACE(BrOnNonData, obj, &value_on_fallthrough,
                           branch_depth.depth);
          } else if (opcode == kExprBrOnNonArray) {
            CALL_INTERFACE(BrOnNonArray, obj, &value_on_fallthrough,
                           branch_depth.depth);
          } else {
            CALL_INTERFACE(BrOnNonI31, obj, &value_on_fallthrough,
                           branch_depth.depth);
          }
          c->br_merge()->reached = true;
        }
        Drop(obj);
        Push(value_on_fallthrough);
        return opcode_length + branch_depth.length;
      }
4921       default:
4922         this->DecodeError("invalid gc opcode: %x", opcode);
4923         return 0;
4924     }
4925   }
4926 #undef NON_CONST_ONLY
4927 
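  // Decodes an opcode from the atomics proposal (atomic loads, stores,
  // read-modify-write operations, and atomic.fence) and dispatches it to the
  // interface. Returns the instruction's length in bytes, or 0 if validation
  // fails.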
  uint32_t DecodeAtomicOpcode(WasmOpcode opcode, uint32_t opcode_length) {
    ValueType ret_type;
    const FunctionSig* sig = WasmOpcodes::Signature(opcode);
    if (!VALIDATE(sig != nullptr)) {
      this->DecodeError("invalid atomic opcode");
      return 0;
    }
    MachineType memtype;
    switch (opcode) {
#define CASE_ATOMIC_STORE_OP(Name, Type)          \
  case kExpr##Name: {                             \
    memtype = MachineType::Type();                \
    ret_type = kWasmVoid;                         \
    break; /* to generic mem access code below */ \
  }
      ATOMIC_STORE_OP_LIST(CASE_ATOMIC_STORE_OP)
#undef CASE_ATOMIC_STORE_OP
#define CASE_ATOMIC_OP(Name, Type)                \
  case kExpr##Name: {                             \
    memtype = MachineType::Type();                \
    ret_type = GetReturnType(sig);                \
    break; /* to generic mem access code below */ \
  }
      ATOMIC_OP_LIST(CASE_ATOMIC_OP)
#undef CASE_ATOMIC_OP
      case kExprAtomicFence: {
        byte zero =
            this->template read_u8<validate>(this->pc_ + opcode_length, "zero");
        if (!VALIDATE(zero == 0)) {
          this->DecodeError(this->pc_ + opcode_length,
                            "invalid atomic operand");
          return 0;
        }
        CALL_INTERFACE_IF_OK_AND_REACHABLE(AtomicFence);
        return 1 + opcode_length;
      }
      default:
        this->DecodeError("invalid atomic opcode");
        return 0;
    }

    MemoryAccessImmediate<validate> imm = MakeMemoryAccessImmediate(
        opcode_length, ElementSizeLog2Of(memtype.representation()));
    if (!this->Validate(this->pc_ + opcode_length, imm)) return 0;

    // TODO(10949): Fix this for memory64 (index type should be kWasmI64
    // then).
    CHECK(!this->module_->is_memory64);
    ArgVector args = PeekArgs(sig);
    if (ret_type == kWasmVoid) {
      CALL_INTERFACE_IF_OK_AND_REACHABLE(AtomicOp, opcode, base::VectorOf(args),
                                         imm, nullptr);
      DropArgs(sig);
    } else {
      Value result = CreateValue(GetReturnType(sig));
      CALL_INTERFACE_IF_OK_AND_REACHABLE(AtomicOp, opcode, base::VectorOf(args),
                                         imm, &result);
      DropArgs(sig);
      Push(result);
    }
    return opcode_length + imm.length;
  }

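  // Decodes an opcode from the numeric prefix: the saturating
  // float-to-integer conversions and the bulk memory/table operations
  // (memory.init, memory.copy, table.grow, etc.). Returns the instruction's
  // length in bytes, or 0 if validation fails.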
  unsigned DecodeNumericOpcode(WasmOpcode opcode, uint32_t opcode_length) {
    const FunctionSig* sig = WasmOpcodes::Signature(opcode);
    switch (opcode) {
      case kExprI32SConvertSatF32:
      case kExprI32UConvertSatF32:
      case kExprI32SConvertSatF64:
      case kExprI32UConvertSatF64:
      case kExprI64SConvertSatF32:
      case kExprI64UConvertSatF32:
      case kExprI64SConvertSatF64:
      case kExprI64UConvertSatF64: {
        BuildSimpleOperator(opcode, sig);
        return opcode_length;
      }
      case kExprMemoryInit: {
        MemoryInitImmediate<validate> imm(this, this->pc_ + opcode_length);
        if (!this->Validate(this->pc_ + opcode_length, imm)) return 0;
        ValueType mem_type = this->module_->is_memory64 ? kWasmI64 : kWasmI32;
        Value size = Peek(0, 2, kWasmI32);
        Value offset = Peek(1, 1, kWasmI32);
        Value dst = Peek(2, 0, mem_type);
        CALL_INTERFACE_IF_OK_AND_REACHABLE(MemoryInit, imm, dst, offset, size);
        Drop(3);
        return opcode_length + imm.length;
      }
      case kExprDataDrop: {
        IndexImmediate<validate> imm(this, this->pc_ + opcode_length,
                                     "data segment index");
        if (!this->ValidateDataSegment(this->pc_ + opcode_length, imm)) {
          return 0;
        }
        CALL_INTERFACE_IF_OK_AND_REACHABLE(DataDrop, imm);
        return opcode_length + imm.length;
      }
      case kExprMemoryCopy: {
        MemoryCopyImmediate<validate> imm(this, this->pc_ + opcode_length);
        if (!this->Validate(this->pc_ + opcode_length, imm)) return 0;
        ValueType mem_type = this->module_->is_memory64 ? kWasmI64 : kWasmI32;
        Value size = Peek(0, 2, mem_type);
        Value src = Peek(1, 1, mem_type);
        Value dst = Peek(2, 0, mem_type);
        CALL_INTERFACE_IF_OK_AND_REACHABLE(MemoryCopy, imm, dst, src, size);
        Drop(3);
        return opcode_length + imm.length;
      }
      case kExprMemoryFill: {
        MemoryIndexImmediate<validate> imm(this, this->pc_ + opcode_length);
        if (!this->Validate(this->pc_ + opcode_length, imm)) return 0;
        ValueType mem_type = this->module_->is_memory64 ? kWasmI64 : kWasmI32;
        Value size = Peek(0, 2, mem_type);
        Value value = Peek(1, 1, kWasmI32);
        Value dst = Peek(2, 0, mem_type);
        CALL_INTERFACE_IF_OK_AND_REACHABLE(MemoryFill, imm, dst, value, size);
        Drop(3);
        return opcode_length + imm.length;
      }
      case kExprTableInit: {
        TableInitImmediate<validate> imm(this, this->pc_ + opcode_length);
        if (!this->Validate(this->pc_ + opcode_length, imm)) return 0;
        ArgVector args = PeekArgs(sig);
        CALL_INTERFACE_IF_OK_AND_REACHABLE(TableInit, imm,
                                           base::VectorOf(args));
        DropArgs(sig);
        return opcode_length + imm.length;
      }
      case kExprElemDrop: {
        IndexImmediate<validate> imm(this, this->pc_ + opcode_length,
                                     "element segment index");
        if (!this->ValidateElementSegment(this->pc_ + opcode_length, imm)) {
          return 0;
        }
        CALL_INTERFACE_IF_OK_AND_REACHABLE(ElemDrop, imm);
        return opcode_length + imm.length;
      }
      case kExprTableCopy: {
        TableCopyImmediate<validate> imm(this, this->pc_ + opcode_length);
        if (!this->Validate(this->pc_ + opcode_length, imm)) return 0;
        ArgVector args = PeekArgs(sig);
        CALL_INTERFACE_IF_OK_AND_REACHABLE(TableCopy, imm,
                                           base::VectorOf(args));
        DropArgs(sig);
        return opcode_length + imm.length;
      }
      case kExprTableGrow: {
        IndexImmediate<validate> imm(this, this->pc_ + opcode_length,
                                     "table index");
        if (!this->ValidateTable(this->pc_ + opcode_length, imm)) return 0;
        Value delta = Peek(0, 1, kWasmI32);
        Value value = Peek(1, 0, this->module_->tables[imm.index].type);
        Value result = CreateValue(kWasmI32);
        CALL_INTERFACE_IF_OK_AND_REACHABLE(TableGrow, imm, value, delta,
                                           &result);
        Drop(2);
        Push(result);
        return opcode_length + imm.length;
      }
      case kExprTableSize: {
        IndexImmediate<validate> imm(this, this->pc_ + opcode_length,
                                     "table index");
        if (!this->ValidateTable(this->pc_ + opcode_length, imm)) return 0;
        Value result = CreateValue(kWasmI32);
        CALL_INTERFACE_IF_OK_AND_REACHABLE(TableSize, imm, &result);
        Push(result);
        return opcode_length + imm.length;
      }
      case kExprTableFill: {
        IndexImmediate<validate> imm(this, this->pc_ + opcode_length,
                                     "table index");
        if (!this->ValidateTable(this->pc_ + opcode_length, imm)) return 0;
        Value count = Peek(0, 2, kWasmI32);
        Value value = Peek(1, 1, this->module_->tables[imm.index].type);
        Value start = Peek(2, 0, kWasmI32);
        CALL_INTERFACE_IF_OK_AND_REACHABLE(TableFill, imm, start, value, count);
        Drop(3);
        return opcode_length + imm.length;
      }
      default:
        this->DecodeError("invalid numeric opcode");
        return 0;
    }
  }

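  // Ensures that the value stack has room for at least {slots_needed}
  // additional elements, growing the backing store if necessary.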
  V8_INLINE void EnsureStackSpace(int slots_needed) {
    if (V8_LIKELY(stack_capacity_end_ - stack_end_ >= slots_needed)) return;
    GrowStackSpace(slots_needed);
  }

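  // Out-of-line slow path of {EnsureStackSpace}: allocates a larger,
  // power-of-two-sized array in the zone and copies the existing stack over.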
  V8_NOINLINE void GrowStackSpace(int slots_needed) {
    size_t new_stack_capacity =
        std::max(size_t{8},
                 base::bits::RoundUpToPowerOfTwo(stack_size() + slots_needed));
    Value* new_stack =
        this->zone()->template NewArray<Value>(new_stack_capacity);
    if (stack_) {
      std::copy(stack_, stack_end_, new_stack);
      this->zone()->DeleteArray(stack_, stack_capacity_end_ - stack_);
    }
    stack_end_ = new_stack + (stack_end_ - stack_);
    stack_ = new_stack;
    stack_capacity_end_ = new_stack + new_stack_capacity;
  }

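  // Creates a new value of the given type, recording the current pc as the
  // position where it originated.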
  V8_INLINE Value CreateValue(ValueType type) { return Value{this->pc_, type}; }
  V8_INLINE void Push(Value value) {
    DCHECK_NE(kWasmVoid, value.type);
    // {EnsureStackSpace} should have been called before, either in the central
    // decoding loop, or individually if more than one element is pushed.
    DCHECK_GT(stack_capacity_end_, stack_end_);
    *stack_end_ = value;
    ++stack_end_;
  }

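  // Resets the stack to the height recorded in control {c} and pushes the
  // values of {merge} (its start or end merge) on top. Does nothing when
  // decoding an initializer expression.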
  void PushMergeValues(Control* c, Merge<Value>* merge) {
    if (decoding_mode == kInitExpression) return;
    DCHECK_EQ(c, &control_.back());
    DCHECK(merge == &c->start_merge || merge == &c->end_merge);
    DCHECK_LE(stack_ + c->stack_depth, stack_end_);
    stack_end_ = stack_ + c->stack_depth;
    if (merge->arity == 1) {
      // {EnsureStackSpace} should have been called before in the central
      // decoding loop.
      DCHECK_GT(stack_capacity_end_, stack_end_);
      *stack_end_++ = merge->vals.first;
    } else {
      EnsureStackSpace(merge->arity);
      for (uint32_t i = 0; i < merge->arity; i++) {
        *stack_end_++ = merge->vals.array[i];
      }
    }
    DCHECK_EQ(c->stack_depth + merge->arity, stack_size());
  }

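  // Creates one value per return type of {sig}; {PushReturns} pushes them all
  // onto the stack, reserving stack space first.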
  V8_INLINE ReturnVector CreateReturnValues(const FunctionSig* sig) {
    size_t return_count = sig->return_count();
    ReturnVector values(return_count);
    std::transform(sig->returns().begin(), sig->returns().end(), values.begin(),
                   [this](ValueType type) { return CreateValue(type); });
    return values;
  }
  V8_INLINE void PushReturns(ReturnVector values) {
    EnsureStackSpace(static_cast<int>(values.size()));
    for (Value& value : values) Push(value);
  }

  // We do not inline these functions because doing so causes a large binary
  // size increase. Not inlining them should not create a performance
  // degradation, because their invocations are guarded by V8_LIKELY.
  V8_NOINLINE void PopTypeError(int index, Value val, const char* expected) {
    this->DecodeError(val.pc(), "%s[%d] expected %s, found %s of type %s",
                      SafeOpcodeNameAt(this->pc_), index, expected,
                      SafeOpcodeNameAt(val.pc()), val.type.name().c_str());
  }

  V8_NOINLINE void PopTypeError(int index, Value val, std::string expected) {
    PopTypeError(index, val, expected.c_str());
  }

  V8_NOINLINE void PopTypeError(int index, Value val, ValueType expected) {
    PopTypeError(index, val, ("type " + expected.name()).c_str());
  }

  V8_NOINLINE void NotEnoughArgumentsError(int needed, int actual) {
    DCHECK_LT(0, needed);
    DCHECK_LE(0, actual);
    DCHECK_LT(actual, needed);
    this->DecodeError(
        "not enough arguments on the stack for %s (need %d, got %d)",
        SafeOpcodeNameAt(this->pc_), needed, actual);
  }

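  // Peeks at the value {depth} slots below the stack top and checks that it is
  // a subtype of {expected}; otherwise reports a type error for operand
  // {index}.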
  V8_INLINE Value Peek(int depth, int index, ValueType expected) {
    Value val = Peek(depth);
    if (!VALIDATE(IsSubtypeOf(val.type, expected, this->module_) ||
                  val.type == kWasmBottom || expected == kWasmBottom)) {
      PopTypeError(index, val, expected);
    }
    return val;
  }

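  // Peeks at the value {depth} slots below the stack top without checking its
  // type. Reaching below the start of the current control block is only valid
  // in unreachable code and yields an unreachable value.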
  V8_INLINE Value Peek(int depth) {
    DCHECK(!control_.empty());
    uint32_t limit = control_.back().stack_depth;
    if (V8_UNLIKELY(stack_size() <= limit + depth)) {
      // Peeking past the current control start in reachable code.
      if (!VALIDATE(decoding_mode == kFunctionBody &&
                    control_.back().unreachable())) {
        NotEnoughArgumentsError(depth + 1, stack_size() - limit);
      }
      return UnreachableValue(this->pc_);
    }
    DCHECK_LE(stack_, stack_end_ - depth - 1);
    return *(stack_end_ - depth - 1);
  }

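  // Checks that the argument at {index} in {args} is a subtype of {expected},
  // reporting a type error otherwise.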
  V8_INLINE void ValidateArgType(ArgVector args, int index,
                                 ValueType expected) {
    Value val = args[index];
    if (!VALIDATE(IsSubtypeOf(val.type, expected, this->module_) ||
                  val.type == kWasmBottom || expected == kWasmBottom)) {
      PopTypeError(index, val, expected);
    }
  }

  // Drop the top {count} stack elements, or all of them if fewer than {count}
  // are present.
  V8_INLINE void Drop(int count = 1) {
    DCHECK(!control_.empty());
    uint32_t limit = control_.back().stack_depth;
    if (V8_UNLIKELY(stack_size() < limit + count)) {
      // Pop what we can.
      count = std::min(count, static_cast<int>(stack_size() - limit));
    }
    DCHECK_LE(stack_, stack_end_ - count);
    stack_end_ -= count;
  }
  // Drop the top stack element if present. Takes a Value input for more
  // descriptive call sites.
  V8_INLINE void Drop(const Value& /* unused */) { Drop(1); }

  enum StackElementsCountMode : bool {
    kNonStrictCounting = false,
    kStrictCounting = true
  };

  enum MergeType {
    kBranchMerge,
    kReturnMerge,
    kFallthroughMerge,
    kInitExprMerge
  };

  // - If the current code is reachable, check if the current stack values are
  //   compatible with {merge} based on their number and types. Disregard the
  //   topmost {drop_values} values on the stack. If {strict_count}, check that
  //   #(stack elements) == {merge->arity}, otherwise
  //   #(stack elements) >= {merge->arity}.
  // - If the current code is unreachable, check if any values that may exist on
  //   top of the stack are compatible with {merge}. If {push_branch_values},
  //   push values back onto the stack based on the types in {merge} (this is
  //   needed for conditional branches due to their typing rules, and for
  //   fallthroughs so that the outer control finds the expected values on the
  //   stack). TODO(manoskouk): We expect the unreachable-code behavior to
  //   change, either due to relaxation of dead code verification, or the
  //   introduction of subtyping.
  template <StackElementsCountMode strict_count, bool push_branch_values,
            MergeType merge_type>
  bool TypeCheckStackAgainstMerge(uint32_t drop_values, Merge<Value>* merge) {
    static_assert(validate, "Call this function only within VALIDATE");
    constexpr const char* merge_description =
        merge_type == kBranchMerge
            ? "branch"
            : merge_type == kReturnMerge
                  ? "return"
                  : merge_type == kInitExprMerge ? "init. expression"
                                                 : "fallthru";
    uint32_t arity = merge->arity;
    uint32_t actual = stack_size() - control_.back().stack_depth;
    // Here we have to check for !unreachable(), because we need to typecheck as
    // if the current code is reachable even if it is spec-only reachable.
    if (V8_LIKELY(decoding_mode == kInitExpression ||
                  !control_.back().unreachable())) {
      if (V8_UNLIKELY(strict_count ? actual != drop_values + arity
                                   : actual < drop_values + arity)) {
        this->DecodeError("expected %u elements on the stack for %s, found %u",
                          arity, merge_description,
                          actual >= drop_values ? actual - drop_values : 0);
        return false;
      }
      // Typecheck the topmost {merge->arity} values on the stack.
      Value* stack_values = stack_end_ - (arity + drop_values);
      for (uint32_t i = 0; i < arity; ++i) {
        Value& val = stack_values[i];
        Value& old = (*merge)[i];
        if (!IsSubtypeOf(val.type, old.type, this->module_)) {
          this->DecodeError("type error in %s[%u] (expected %s, got %s)",
                            merge_description, i, old.type.name().c_str(),
                            val.type.name().c_str());
          return false;
        }
      }
      return true;
    }
    // Unreachable code validation starts here.
    if (V8_UNLIKELY(strict_count && actual > drop_values + arity)) {
      this->DecodeError("expected %u elements on the stack for %s, found %u",
                        arity, merge_description,
                        actual >= drop_values ? actual - drop_values : 0);
      return false;
    }
    // TODO(manoskouk): Use similar code as above if we keep unreachable checks.
    for (int i = arity - 1, depth = drop_values; i >= 0; --i, ++depth) {
      Peek(depth, i, (*merge)[i].type);
    }
    if (push_branch_values) {
      uint32_t inserted_value_count =
          static_cast<uint32_t>(EnsureStackArguments(drop_values + arity));
      if (inserted_value_count > 0) {
        // {EnsureStackArguments} may have inserted unreachable values into the
        // bottom of the stack. If so, mark them with the correct type. If drop
        // values were also inserted, disregard them, as they will be dropped
        // anyway.
        Value* stack_base = stack_value(drop_values + arity);
        for (uint32_t i = 0; i < std::min(arity, inserted_value_count); i++) {
          if (stack_base[i].type == kWasmBottom) {
            stack_base[i].type = (*merge)[i].type;
          }
        }
      }
    }
    return this->ok();
  }

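  // Type-checks the current stack against the function's result types and, if
  // successful, emits the return and ends the current control block.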
  template <StackElementsCountMode strict_count, MergeType merge_type>
  bool DoReturn() {
    if (!VALIDATE((TypeCheckStackAgainstMerge<strict_count, false, merge_type>(
            0, &control_.front().end_merge)))) {
      return false;
    }
    DCHECK_IMPLIES(current_code_reachable_and_ok_,
                   stack_size() >= this->sig_->return_count());
    CALL_INTERFACE_IF_OK_AND_REACHABLE(DoReturn, 0);
    EndControl();
    return true;
  }

  int startrel(const byte* ptr) { return static_cast<int>(ptr - this->start_); }

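  // Handles the implicit fall-through at the end of the current control block:
  // type-checks the stack against the block's end merge and notifies the
  // interface.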
  void FallThrough() {
    Control* c = &control_.back();
    DCHECK_NE(c->kind, kControlLoop);
    if (!VALIDATE(TypeCheckFallThru())) return;
    CALL_INTERFACE_IF_OK_AND_REACHABLE(FallThruTo, c);
    if (c->reachable()) c->end_merge.reached = true;
  }

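  // A one-armed {if} behaves like one with an empty else branch that forwards
  // its inputs, so its start and end merges must have the same arity and
  // compatible types.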
  bool TypeCheckOneArmedIf(Control* c) {
    static_assert(validate, "Call this function only within VALIDATE");
    DCHECK(c->is_onearmed_if());
    if (c->end_merge.arity != c->start_merge.arity) {
      this->DecodeError(c->pc(),
                        "start-arity and end-arity of one-armed if must match");
      return false;
    }
    for (uint32_t i = 0; i < c->start_merge.arity; ++i) {
      Value& start = c->start_merge[i];
      Value& end = c->end_merge[i];
      if (!IsSubtypeOf(start.type, end.type, this->module_)) {
        this->DecodeError("type error in merge[%u] (expected %s, got %s)", i,
                          end.type.name().c_str(), start.type.name().c_str());
        return false;
      }
    }
    return true;
  }

  bool TypeCheckFallThru() {
    static_assert(validate, "Call this function only within VALIDATE");
    return TypeCheckStackAgainstMerge<kStrictCounting, true, kFallthroughMerge>(
        0, &control_.back().end_merge);
  }

  // If the current code is reachable, check if the current stack values are
  // compatible with a jump to {c}, based on their number and types.
  // Otherwise, we have a polymorphic stack: check if any values that may exist
  // on top of the stack are compatible with {c}. If {push_branch_values},
  // push values back onto the stack based on the types in {c} (this is needed
  // for conditional branches due to their typing rules, and for fallthroughs
  // so that the outer control finds enough values on the stack).
  // {drop_values} is the number of stack values that will be dropped before the
  // branch is taken. This is currently 1 for br_if (condition), br_table
  // (index), and br_on_null (reference), and 0 for all other branches.
  template <bool push_branch_values>
  bool TypeCheckBranch(Control* c, uint32_t drop_values) {
    static_assert(validate, "Call this function only within VALIDATE");
    return TypeCheckStackAgainstMerge<kNonStrictCounting, push_branch_values,
                                      kBranchMerge>(drop_values, c->br_merge());
  }

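  // Called when the first decoding error is recorded: terminates the decoding
  // loop and forwards the error to the interface.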
  void onFirstError() override {
    this->end_ = this->pc_;  // Terminate decoding loop.
    this->current_code_reachable_and_ok_ = false;
    TRACE(" !%s\n", this->error_.message().c_str());
    // Cannot use CALL_INTERFACE_* macros because we emitted an error.
    interface().OnFirstError(this);
  }

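  // Like {BuildSimpleOperator}, but first checks that the required feature is
  // enabled (currently only needed for {ref.eq}, which requires the gc
  // feature).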
  int BuildSimplePrototypeOperator(WasmOpcode opcode) {
    if (opcode == kExprRefEq) {
      CHECK_PROTOTYPE_OPCODE(gc);
    }
    const FunctionSig* sig = WasmOpcodes::Signature(opcode);
    return BuildSimpleOperator(opcode, sig);
  }

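  // Dispatches to the unary or binary overload below, based on the parameter
  // count of {sig}.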
  int BuildSimpleOperator(WasmOpcode opcode, const FunctionSig* sig) {
    DCHECK_GE(1, sig->return_count());
    if (sig->parameter_count() == 1) {
      // All current simple unary operators have exactly 1 return value.
      DCHECK_EQ(1, sig->return_count());
      return BuildSimpleOperator(opcode, sig->GetReturn(0), sig->GetParam(0));
    } else {
      DCHECK_EQ(2, sig->parameter_count());
      ValueType ret = sig->return_count() == 0 ? kWasmVoid : sig->GetReturn(0);
      return BuildSimpleOperator(opcode, ret, sig->GetParam(0),
                                 sig->GetParam(1));
    }
  }

  int BuildSimpleOperator(WasmOpcode opcode, ValueType return_type,
                          ValueType arg_type) {
    DCHECK_NE(kWasmVoid, return_type);
    Value val = Peek(0, 0, arg_type);
    Value ret = CreateValue(return_type);
    CALL_INTERFACE_IF_OK_AND_REACHABLE(UnOp, opcode, val, &ret);
    Drop(val);
    Push(ret);
    return 1;
  }

  int BuildSimpleOperator(WasmOpcode opcode, ValueType return_type,
                          ValueType lhs_type, ValueType rhs_type) {
    Value rval = Peek(0, 1, rhs_type);
    Value lval = Peek(1, 0, lhs_type);
    if (return_type == kWasmVoid) {
      CALL_INTERFACE_IF_OK_AND_REACHABLE(BinOp, opcode, lval, rval, nullptr);
      Drop(2);
    } else {
      Value ret = CreateValue(return_type);
      CALL_INTERFACE_IF_OK_AND_REACHABLE(BinOp, opcode, lval, rval, &ret);
      Drop(2);
      Push(ret);
    }
    return 1;
  }

#define DEFINE_SIMPLE_SIG_OPERATOR(sig, ...)         \
  int BuildSimpleOperator_##sig(WasmOpcode opcode) { \
    return BuildSimpleOperator(opcode, __VA_ARGS__); \
  }
  FOREACH_SIGNATURE(DEFINE_SIMPLE_SIG_OPERATOR)
#undef DEFINE_SIMPLE_SIG_OPERATOR
};

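// A decoder interface implementation whose callbacks all do nothing.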
class EmptyInterface {
 public:
  static constexpr Decoder::ValidateFlag validate = Decoder::kFullValidation;
  static constexpr DecodingMode decoding_mode = kFunctionBody;
  using Value = ValueBase<validate>;
  using Control = ControlBase<Value, validate>;
  using FullDecoder = WasmFullDecoder<validate, EmptyInterface>;

#define DEFINE_EMPTY_CALLBACK(name, ...) \
  void name(FullDecoder* decoder, ##__VA_ARGS__) {}
  INTERFACE_FUNCTIONS(DEFINE_EMPTY_CALLBACK)
#undef DEFINE_EMPTY_CALLBACK
};

#undef CALL_INTERFACE_IF_OK_AND_REACHABLE
#undef CALL_INTERFACE_IF_OK_AND_PARENT_REACHABLE
#undef TRACE
#undef TRACE_INST_FORMAT
#undef VALIDATE
#undef CHECK_PROTOTYPE_OPCODE

}  // namespace wasm
}  // namespace internal
}  // namespace v8

#endif  // V8_WASM_FUNCTION_BODY_DECODER_IMPL_H_