// xref: /third_party/node/deps/v8/src/wasm/wasm-opcodes.h (revision 1cb0ef41)
1// Copyright 2015 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#if !V8_ENABLE_WEBASSEMBLY
6#error This header should only be included if WebAssembly is enabled.
7#endif  // !V8_ENABLE_WEBASSEMBLY
8
9#ifndef V8_WASM_WASM_OPCODES_H_
10#define V8_WASM_WASM_OPCODES_H_
11
12#include <memory>
13
14#include "src/base/platform/wrappers.h"
15#include "src/common/globals.h"
16#include "src/common/message-template.h"
17#include "src/wasm/value-type.h"
18#include "src/wasm/wasm-constants.h"
19
20namespace v8 {
21namespace internal {
22
23namespace wasm {
24
25class WasmFeatures;
26struct WasmModule;
27
// Writes a textual representation of a function signature to {os}
// (defined in the corresponding .cc file).
std::ostream& operator<<(std::ostream& os, const FunctionSig& function);
// NOTE(review): presumably checks that all types in {sig} can cross the
// JS<->Wasm boundary under the given feature set — confirm exact rules in
// the .cc implementation.
bool V8_EXPORT_PRIVATE IsJSCompatibleSignature(const FunctionSig* sig,
                                               const WasmModule* module,
                                               const WasmFeatures&);
32
33// Control expressions and blocks.
// X-macro: V(Name, byte encoding, signature). Control opcodes carry '_'
// because their types depend on immediates/block types, not a fixed
// signature. Trailing comments name the proposal an opcode comes from.
// Entries are grouped by feature, not sorted numerically (note 0x16 last).
#define FOREACH_CONTROL_OPCODE(V)               \
  V(Unreachable, 0x00, _)                       \
  V(Nop, 0x01, _)                               \
  V(Block, 0x02, _)                             \
  V(Loop, 0x03, _)                              \
  V(If, 0x04, _)                                \
  V(Else, 0x05, _)                              \
  V(Try, 0x06, _ /* eh_prototype */)            \
  V(Catch, 0x07, _ /* eh_prototype */)          \
  V(Throw, 0x08, _ /* eh_prototype */)          \
  V(Rethrow, 0x09, _ /* eh_prototype */)        \
  V(End, 0x0b, _)                               \
  V(Br, 0x0c, _)                                \
  V(BrIf, 0x0d, _)                              \
  V(BrTable, 0x0e, _)                           \
  V(Return, 0x0f, _)                            \
  V(Let, 0x17, _ /* typed_funcref prototype */) \
  V(Delegate, 0x18, _ /* eh_prototype */)       \
  V(CatchAll, 0x19, _ /* eh_prototype */)       \
  V(BrOnNull, 0xd4, _ /* gc prototype */)       \
  V(BrOnNonNull, 0xd6, _ /* gc prototype */)    \
  V(NopForTestingUnsupportedInLiftoff, 0x16, _)
56
57// Constants, locals, globals, and calls.
// Calls, parametric operators, local/global/table access, constants and
// reference operations. '_' signature: types come from immediates or the
// module (e.g. the callee's signature, the local's declared type).
#define FOREACH_MISC_OPCODE(V)                            \
  V(CallFunction, 0x10, _)                                \
  V(CallIndirect, 0x11, _)                                \
  V(ReturnCall, 0x12, _)                                  \
  V(ReturnCallIndirect, 0x13, _)                          \
  V(CallRef, 0x14, _ /* typed_funcref prototype */)       \
  V(ReturnCallRef, 0x15, _ /* typed_funcref prototype */) \
  V(Drop, 0x1a, _)                                        \
  V(Select, 0x1b, _)                                      \
  V(SelectWithType, 0x1c, _)                              \
  V(LocalGet, 0x20, _)                                    \
  V(LocalSet, 0x21, _)                                    \
  V(LocalTee, 0x22, _)                                    \
  V(GlobalGet, 0x23, _)                                   \
  V(GlobalSet, 0x24, _)                                   \
  V(TableGet, 0x25, _)                                    \
  V(TableSet, 0x26, _)                                    \
  V(I32Const, 0x41, _)                                    \
  V(I64Const, 0x42, _)                                    \
  V(F32Const, 0x43, _)                                    \
  V(F64Const, 0x44, _)                                    \
  V(RefNull, 0xd0, _)                                     \
  V(RefIsNull, 0xd1, _)                                   \
  V(RefFunc, 0xd2, _)                                     \
  V(RefAsNonNull, 0xd3, _ /* typed_funcref prototype */)
83
84// Load memory expressions.
// Signature shorthand is <result>_<params> with i=i32, l=i64, f=f32, d=f64
// (s=s128, v=void elsewhere); e.g. l_i = produces i64 from an i32 address.
#define FOREACH_LOAD_MEM_OPCODE(V) \
  V(I32LoadMem, 0x28, i_i)         \
  V(I64LoadMem, 0x29, l_i)         \
  V(F32LoadMem, 0x2a, f_i)         \
  V(F64LoadMem, 0x2b, d_i)         \
  V(I32LoadMem8S, 0x2c, i_i)       \
  V(I32LoadMem8U, 0x2d, i_i)       \
  V(I32LoadMem16S, 0x2e, i_i)      \
  V(I32LoadMem16U, 0x2f, i_i)      \
  V(I64LoadMem8S, 0x30, l_i)       \
  V(I64LoadMem8U, 0x31, l_i)       \
  V(I64LoadMem16S, 0x32, l_i)      \
  V(I64LoadMem16U, 0x33, l_i)      \
  V(I64LoadMem32S, 0x34, l_i)      \
  V(I64LoadMem32U, 0x35, l_i)
100
101// Store memory expressions.
// Stores produce no value: v_ii = void(i32 address, i32 value), etc.
#define FOREACH_STORE_MEM_OPCODE(V) \
  V(I32StoreMem, 0x36, v_ii)        \
  V(I64StoreMem, 0x37, v_il)        \
  V(F32StoreMem, 0x38, v_if)        \
  V(F64StoreMem, 0x39, v_id)        \
  V(I32StoreMem8, 0x3a, v_ii)       \
  V(I32StoreMem16, 0x3b, v_ii)      \
  V(I64StoreMem8, 0x3c, v_il)       \
  V(I64StoreMem16, 0x3d, v_il)     \
  V(I64StoreMem32, 0x3e, v_il)
112
113// Miscellaneous memory expressions
// memory.size (no args) and memory.grow (delta in pages), both return i32.
#define FOREACH_MISC_MEM_OPCODE(V) \
  V(MemorySize, 0x3f, i_v)         \
  V(MemoryGrow, 0x40, i_i)
117
118// Expressions with signatures.
119
120// The following opcodes can be used as constant expressions under
121// --experimental-wasm-extended-const.
// Integer add/sub/mul: split out from the other "simple" opcodes because
// the extended-const proposal also allows them in constant expressions.
#define FOREACH_SIMPLE_EXTENDED_CONST_OPCODE(V) \
  V(I32Add, 0x6a, i_ii)                         \
  V(I32Sub, 0x6b, i_ii)                         \
  V(I32Mul, 0x6c, i_ii)                         \
  V(I64Add, 0x7c, l_ll)                         \
  V(I64Sub, 0x7d, l_ll)                         \
  V(I64Mul, 0x7e, l_ll)
129
// All remaining fixed-signature single-byte numeric opcodes: comparisons,
// arithmetic/bitwise ops, conversions, reinterprets and sign-extensions.
// These are never valid in constant expressions (cf. the macro above).
#define FOREACH_SIMPLE_NON_CONST_OPCODE(V) \
  V(I32Eqz, 0x45, i_i)                     \
  V(I32Eq, 0x46, i_ii)                     \
  V(I32Ne, 0x47, i_ii)                     \
  V(I32LtS, 0x48, i_ii)                    \
  V(I32LtU, 0x49, i_ii)                    \
  V(I32GtS, 0x4a, i_ii)                    \
  V(I32GtU, 0x4b, i_ii)                    \
  V(I32LeS, 0x4c, i_ii)                    \
  V(I32LeU, 0x4d, i_ii)                    \
  V(I32GeS, 0x4e, i_ii)                    \
  V(I32GeU, 0x4f, i_ii)                    \
  V(I64Eqz, 0x50, i_l)                     \
  V(I64Eq, 0x51, i_ll)                     \
  V(I64Ne, 0x52, i_ll)                     \
  V(I64LtS, 0x53, i_ll)                    \
  V(I64LtU, 0x54, i_ll)                    \
  V(I64GtS, 0x55, i_ll)                    \
  V(I64GtU, 0x56, i_ll)                    \
  V(I64LeS, 0x57, i_ll)                    \
  V(I64LeU, 0x58, i_ll)                    \
  V(I64GeS, 0x59, i_ll)                    \
  V(I64GeU, 0x5a, i_ll)                    \
  V(F32Eq, 0x5b, i_ff)                     \
  V(F32Ne, 0x5c, i_ff)                     \
  V(F32Lt, 0x5d, i_ff)                     \
  V(F32Gt, 0x5e, i_ff)                     \
  V(F32Le, 0x5f, i_ff)                     \
  V(F32Ge, 0x60, i_ff)                     \
  V(F64Eq, 0x61, i_dd)                     \
  V(F64Ne, 0x62, i_dd)                     \
  V(F64Lt, 0x63, i_dd)                     \
  V(F64Gt, 0x64, i_dd)                     \
  V(F64Le, 0x65, i_dd)                     \
  V(F64Ge, 0x66, i_dd)                     \
  V(I32Clz, 0x67, i_i)                     \
  V(I32Ctz, 0x68, i_i)                     \
  V(I32Popcnt, 0x69, i_i)                  \
  V(I32DivS, 0x6d, i_ii)                   \
  V(I32DivU, 0x6e, i_ii)                   \
  V(I32RemS, 0x6f, i_ii)                   \
  V(I32RemU, 0x70, i_ii)                   \
  V(I32And, 0x71, i_ii)                    \
  V(I32Ior, 0x72, i_ii)                    \
  V(I32Xor, 0x73, i_ii)                    \
  V(I32Shl, 0x74, i_ii)                    \
  V(I32ShrS, 0x75, i_ii)                   \
  V(I32ShrU, 0x76, i_ii)                   \
  V(I32Rol, 0x77, i_ii)                    \
  V(I32Ror, 0x78, i_ii)                    \
  V(I64Clz, 0x79, l_l)                     \
  V(I64Ctz, 0x7a, l_l)                     \
  V(I64Popcnt, 0x7b, l_l)                  \
  V(I64DivS, 0x7f, l_ll)                   \
  V(I64DivU, 0x80, l_ll)                   \
  V(I64RemS, 0x81, l_ll)                   \
  V(I64RemU, 0x82, l_ll)                   \
  V(I64And, 0x83, l_ll)                    \
  V(I64Ior, 0x84, l_ll)                    \
  V(I64Xor, 0x85, l_ll)                    \
  V(I64Shl, 0x86, l_ll)                    \
  V(I64ShrS, 0x87, l_ll)                   \
  V(I64ShrU, 0x88, l_ll)                   \
  V(I64Rol, 0x89, l_ll)                    \
  V(I64Ror, 0x8a, l_ll)                    \
  V(F32Abs, 0x8b, f_f)                     \
  V(F32Neg, 0x8c, f_f)                     \
  V(F32Ceil, 0x8d, f_f)                    \
  V(F32Floor, 0x8e, f_f)                   \
  V(F32Trunc, 0x8f, f_f)                   \
  V(F32NearestInt, 0x90, f_f)              \
  V(F32Sqrt, 0x91, f_f)                    \
  V(F32Add, 0x92, f_ff)                    \
  V(F32Sub, 0x93, f_ff)                    \
  V(F32Mul, 0x94, f_ff)                    \
  V(F32Div, 0x95, f_ff)                    \
  V(F32Min, 0x96, f_ff)                    \
  V(F32Max, 0x97, f_ff)                    \
  V(F32CopySign, 0x98, f_ff)               \
  V(F64Abs, 0x99, d_d)                     \
  V(F64Neg, 0x9a, d_d)                     \
  V(F64Ceil, 0x9b, d_d)                    \
  V(F64Floor, 0x9c, d_d)                   \
  V(F64Trunc, 0x9d, d_d)                   \
  V(F64NearestInt, 0x9e, d_d)              \
  V(F64Sqrt, 0x9f, d_d)                    \
  V(F64Add, 0xa0, d_dd)                    \
  V(F64Sub, 0xa1, d_dd)                    \
  V(F64Mul, 0xa2, d_dd)                    \
  V(F64Div, 0xa3, d_dd)                    \
  V(F64Min, 0xa4, d_dd)                    \
  V(F64Max, 0xa5, d_dd)                    \
  V(F64CopySign, 0xa6, d_dd)               \
  V(I32ConvertI64, 0xa7, i_l)              \
  V(I32SConvertF32, 0xa8, i_f)             \
  V(I32UConvertF32, 0xa9, i_f)             \
  V(I32SConvertF64, 0xaa, i_d)             \
  V(I32UConvertF64, 0xab, i_d)             \
  V(I64SConvertI32, 0xac, l_i)             \
  V(I64UConvertI32, 0xad, l_i)             \
  V(I64SConvertF32, 0xae, l_f)             \
  V(I64UConvertF32, 0xaf, l_f)             \
  V(I64SConvertF64, 0xb0, l_d)             \
  V(I64UConvertF64, 0xb1, l_d)             \
  V(F32SConvertI32, 0xb2, f_i)             \
  V(F32UConvertI32, 0xb3, f_i)             \
  V(F32SConvertI64, 0xb4, f_l)             \
  V(F32UConvertI64, 0xb5, f_l)             \
  V(F32ConvertF64, 0xb6, f_d)              \
  V(F64SConvertI32, 0xb7, d_i)             \
  V(F64UConvertI32, 0xb8, d_i)             \
  V(F64SConvertI64, 0xb9, d_l)             \
  V(F64UConvertI64, 0xba, d_l)             \
  V(F64ConvertF32, 0xbb, d_f)              \
  V(I32ReinterpretF32, 0xbc, i_f)          \
  V(I64ReinterpretF64, 0xbd, l_d)          \
  V(F32ReinterpretI32, 0xbe, f_i)          \
  V(F64ReinterpretI64, 0xbf, d_l)          \
  V(I32SExtendI8, 0xc0, i_i)               \
  V(I32SExtendI16, 0xc1, i_i)              \
  V(I64SExtendI8, 0xc2, l_l)               \
  V(I64SExtendI16, 0xc3, l_l)              \
  V(I64SExtendI32, 0xc4, l_l)
253
// All fixed-signature "simple" opcodes (const-valid and not).
#define FOREACH_SIMPLE_OPCODE(V)          \
  FOREACH_SIMPLE_EXTENDED_CONST_OPCODE(V) \
  FOREACH_SIMPLE_NON_CONST_OPCODE(V)

// Fixed-signature opcode from an in-progress proposal; 'q' in the signature
// shorthand is presumably a reference type (see value-type.h — confirm).
#define FOREACH_SIMPLE_PROTOTYPE_OPCODE(V) V(RefEq, 0xd5, i_qq)
259
260// For compatibility with Asm.js.
261// These opcodes are not spec'ed (or visible) externally; the idea is
262// to use unused ranges for internal purposes.
// Internal-only opcodes used when compiling asm.js; they occupy encoding
// space the spec leaves unused and must never appear in real wasm modules.
#define FOREACH_ASMJS_COMPAT_OPCODE(V) \
  V(F64Acos, 0xdc, d_d)                \
  V(F64Asin, 0xdd, d_d)                \
  V(F64Atan, 0xde, d_d)                \
  V(F64Cos, 0xdf, d_d)                 \
  V(F64Sin, 0xe0, d_d)                 \
  V(F64Tan, 0xe1, d_d)                 \
  V(F64Exp, 0xe2, d_d)                 \
  V(F64Log, 0xe3, d_d)                 \
  V(F64Atan2, 0xe4, d_dd)              \
  V(F64Pow, 0xe5, d_dd)                \
  V(F64Mod, 0xe6, d_dd)                \
  V(I32AsmjsDivS, 0xe7, i_ii)          \
  V(I32AsmjsDivU, 0xe8, i_ii)          \
  V(I32AsmjsRemS, 0xe9, i_ii)          \
  V(I32AsmjsRemU, 0xea, i_ii)          \
  V(I32AsmjsLoadMem8S, 0xeb, i_i)      \
  V(I32AsmjsLoadMem8U, 0xec, i_i)      \
  V(I32AsmjsLoadMem16S, 0xed, i_i)     \
  V(I32AsmjsLoadMem16U, 0xee, i_i)     \
  V(I32AsmjsLoadMem, 0xef, i_i)        \
  V(F32AsmjsLoadMem, 0xf0, f_i)        \
  V(F64AsmjsLoadMem, 0xf1, d_i)        \
  V(I32AsmjsStoreMem8, 0xf2, i_ii)     \
  V(I32AsmjsStoreMem16, 0xf3, i_ii)    \
  V(I32AsmjsStoreMem, 0xf4, i_ii)      \
  V(F32AsmjsStoreMem, 0xf5, f_if)      \
  V(F64AsmjsStoreMem, 0xf6, d_id)      \
  V(I32AsmjsSConvertF32, 0xf7, i_f)    \
  V(I32AsmjsUConvertF32, 0xf8, i_f)    \
  V(I32AsmjsSConvertF64, 0xf9, i_d)    \
  V(I32AsmjsUConvertF64, 0xfa, i_d)
295
// SIMD memory access (0xfd prefix). 's' in signatures = s128.
#define FOREACH_SIMD_MEM_OPCODE(V) \
  V(S128LoadMem, 0xfd00, s_i)      \
  V(S128Load8x8S, 0xfd01, s_i)     \
  V(S128Load8x8U, 0xfd02, s_i)     \
  V(S128Load16x4S, 0xfd03, s_i)    \
  V(S128Load16x4U, 0xfd04, s_i)    \
  V(S128Load32x2S, 0xfd05, s_i)    \
  V(S128Load32x2U, 0xfd06, s_i)    \
  V(S128Load8Splat, 0xfd07, s_i)   \
  V(S128Load16Splat, 0xfd08, s_i)  \
  V(S128Load32Splat, 0xfd09, s_i)  \
  V(S128Load64Splat, 0xfd0a, s_i)  \
  V(S128StoreMem, 0xfd0b, v_is)    \
  V(S128Load32Zero, 0xfd5c, s_i)   \
  V(S128Load64Zero, 0xfd5d, s_i)
311
// SIMD lane load/store: memory access plus one lane-index immediate.
#define FOREACH_SIMD_MEM_1_OPERAND_OPCODE(V) \
  V(S128Load8Lane, 0xfd54, s_is)             \
  V(S128Load16Lane, 0xfd55, s_is)            \
  V(S128Load32Lane, 0xfd56, s_is)            \
  V(S128Load64Lane, 0xfd57, s_is)            \
  V(S128Store8Lane, 0xfd58, v_is)            \
  V(S128Store16Lane, 0xfd59, v_is)           \
  V(S128Store32Lane, 0xfd5a, v_is)           \
  V(S128Store64Lane, 0xfd5b, v_is)
321
// v128.const: carries a 16-byte immediate, hence no fixed signature here.
#define FOREACH_SIMD_CONST_OPCODE(V) V(S128Const, 0xfd0c, _)

// i8x16.shuffle: takes a 16-lane mask immediate in addition to two s128s.
#define FOREACH_SIMD_MASK_OPERAND_OPCODE(V) V(I8x16Shuffle, 0xfd0d, s_ss)
325
// SIMD MVP opcodes with no immediate operands. Gaps in the 0xfdXX numbering
// are taken by the relaxed-simd opcodes listed in the macro below.
#define FOREACH_SIMD_MVP_0_OPERAND_OPCODE(V) \
  V(I8x16Swizzle, 0xfd0e, s_ss)              \
  V(I8x16Splat, 0xfd0f, s_i)                 \
  V(I16x8Splat, 0xfd10, s_i)                 \
  V(I32x4Splat, 0xfd11, s_i)                 \
  V(I64x2Splat, 0xfd12, s_l)                 \
  V(F32x4Splat, 0xfd13, s_f)                 \
  V(F64x2Splat, 0xfd14, s_d)                 \
  V(I8x16Eq, 0xfd23, s_ss)                   \
  V(I8x16Ne, 0xfd24, s_ss)                   \
  V(I8x16LtS, 0xfd25, s_ss)                  \
  V(I8x16LtU, 0xfd26, s_ss)                  \
  V(I8x16GtS, 0xfd27, s_ss)                  \
  V(I8x16GtU, 0xfd28, s_ss)                  \
  V(I8x16LeS, 0xfd29, s_ss)                  \
  V(I8x16LeU, 0xfd2a, s_ss)                  \
  V(I8x16GeS, 0xfd2b, s_ss)                  \
  V(I8x16GeU, 0xfd2c, s_ss)                  \
  V(I16x8Eq, 0xfd2d, s_ss)                   \
  V(I16x8Ne, 0xfd2e, s_ss)                   \
  V(I16x8LtS, 0xfd2f, s_ss)                  \
  V(I16x8LtU, 0xfd30, s_ss)                  \
  V(I16x8GtS, 0xfd31, s_ss)                  \
  V(I16x8GtU, 0xfd32, s_ss)                  \
  V(I16x8LeS, 0xfd33, s_ss)                  \
  V(I16x8LeU, 0xfd34, s_ss)                  \
  V(I16x8GeS, 0xfd35, s_ss)                  \
  V(I16x8GeU, 0xfd36, s_ss)                  \
  V(I32x4Eq, 0xfd37, s_ss)                   \
  V(I32x4Ne, 0xfd38, s_ss)                   \
  V(I32x4LtS, 0xfd39, s_ss)                  \
  V(I32x4LtU, 0xfd3a, s_ss)                  \
  V(I32x4GtS, 0xfd3b, s_ss)                  \
  V(I32x4GtU, 0xfd3c, s_ss)                  \
  V(I32x4LeS, 0xfd3d, s_ss)                  \
  V(I32x4LeU, 0xfd3e, s_ss)                  \
  V(I32x4GeS, 0xfd3f, s_ss)                  \
  V(I32x4GeU, 0xfd40, s_ss)                  \
  V(F32x4Eq, 0xfd41, s_ss)                   \
  V(F32x4Ne, 0xfd42, s_ss)                   \
  V(F32x4Lt, 0xfd43, s_ss)                   \
  V(F32x4Gt, 0xfd44, s_ss)                   \
  V(F32x4Le, 0xfd45, s_ss)                   \
  V(F32x4Ge, 0xfd46, s_ss)                   \
  V(F64x2Eq, 0xfd47, s_ss)                   \
  V(F64x2Ne, 0xfd48, s_ss)                   \
  V(F64x2Lt, 0xfd49, s_ss)                   \
  V(F64x2Gt, 0xfd4a, s_ss)                   \
  V(F64x2Le, 0xfd4b, s_ss)                   \
  V(F64x2Ge, 0xfd4c, s_ss)                   \
  V(S128Not, 0xfd4d, s_s)                    \
  V(S128And, 0xfd4e, s_ss)                   \
  V(S128AndNot, 0xfd4f, s_ss)                \
  V(S128Or, 0xfd50, s_ss)                    \
  V(S128Xor, 0xfd51, s_ss)                   \
  V(S128Select, 0xfd52, s_sss)               \
  V(V128AnyTrue, 0xfd53, i_s)                \
  V(F32x4DemoteF64x2Zero, 0xfd5e, s_s)       \
  V(F64x2PromoteLowF32x4, 0xfd5f, s_s)       \
  V(I8x16Abs, 0xfd60, s_s)                   \
  V(I8x16Neg, 0xfd61, s_s)                   \
  V(I8x16Popcnt, 0xfd62, s_s)                \
  V(I8x16AllTrue, 0xfd63, i_s)               \
  V(I8x16BitMask, 0xfd64, i_s)               \
  V(I8x16SConvertI16x8, 0xfd65, s_ss)        \
  V(I8x16UConvertI16x8, 0xfd66, s_ss)        \
  V(F32x4Ceil, 0xfd67, s_s)                  \
  V(F32x4Floor, 0xfd68, s_s)                 \
  V(F32x4Trunc, 0xfd69, s_s)                 \
  V(F32x4NearestInt, 0xfd6a, s_s)            \
  V(I8x16Shl, 0xfd6b, s_si)                  \
  V(I8x16ShrS, 0xfd6c, s_si)                 \
  V(I8x16ShrU, 0xfd6d, s_si)                 \
  V(I8x16Add, 0xfd6e, s_ss)                  \
  V(I8x16AddSatS, 0xfd6f, s_ss)              \
  V(I8x16AddSatU, 0xfd70, s_ss)              \
  V(I8x16Sub, 0xfd71, s_ss)                  \
  V(I8x16SubSatS, 0xfd72, s_ss)              \
  V(I8x16SubSatU, 0xfd73, s_ss)              \
  V(F64x2Ceil, 0xfd74, s_s)                  \
  V(F64x2Floor, 0xfd75, s_s)                 \
  V(I8x16MinS, 0xfd76, s_ss)                 \
  V(I8x16MinU, 0xfd77, s_ss)                 \
  V(I8x16MaxS, 0xfd78, s_ss)                 \
  V(I8x16MaxU, 0xfd79, s_ss)                 \
  V(F64x2Trunc, 0xfd7a, s_s)                 \
  V(I8x16RoundingAverageU, 0xfd7b, s_ss)     \
  V(I16x8ExtAddPairwiseI8x16S, 0xfd7c, s_s)  \
  V(I16x8ExtAddPairwiseI8x16U, 0xfd7d, s_s)  \
  V(I32x4ExtAddPairwiseI16x8S, 0xfd7e, s_s)  \
  V(I32x4ExtAddPairwiseI16x8U, 0xfd7f, s_s)  \
  V(I16x8Abs, 0xfd80, s_s)                   \
  V(I16x8Neg, 0xfd81, s_s)                   \
  V(I16x8Q15MulRSatS, 0xfd82, s_ss)          \
  V(I16x8AllTrue, 0xfd83, i_s)               \
  V(I16x8BitMask, 0xfd84, i_s)               \
  V(I16x8SConvertI32x4, 0xfd85, s_ss)        \
  V(I16x8UConvertI32x4, 0xfd86, s_ss)        \
  V(I16x8SConvertI8x16Low, 0xfd87, s_s)      \
  V(I16x8SConvertI8x16High, 0xfd88, s_s)     \
  V(I16x8UConvertI8x16Low, 0xfd89, s_s)      \
  V(I16x8UConvertI8x16High, 0xfd8a, s_s)     \
  V(I16x8Shl, 0xfd8b, s_si)                  \
  V(I16x8ShrS, 0xfd8c, s_si)                 \
  V(I16x8ShrU, 0xfd8d, s_si)                 \
  V(I16x8Add, 0xfd8e, s_ss)                  \
  V(I16x8AddSatS, 0xfd8f, s_ss)              \
  V(I16x8AddSatU, 0xfd90, s_ss)              \
  V(I16x8Sub, 0xfd91, s_ss)                  \
  V(I16x8SubSatS, 0xfd92, s_ss)              \
  V(I16x8SubSatU, 0xfd93, s_ss)              \
  V(F64x2NearestInt, 0xfd94, s_s)            \
  V(I16x8Mul, 0xfd95, s_ss)                  \
  V(I16x8MinS, 0xfd96, s_ss)                 \
  V(I16x8MinU, 0xfd97, s_ss)                 \
  V(I16x8MaxS, 0xfd98, s_ss)                 \
  V(I16x8MaxU, 0xfd99, s_ss)                 \
  V(I16x8RoundingAverageU, 0xfd9b, s_ss)     \
  V(I16x8ExtMulLowI8x16S, 0xfd9c, s_ss)      \
  V(I16x8ExtMulHighI8x16S, 0xfd9d, s_ss)     \
  V(I16x8ExtMulLowI8x16U, 0xfd9e, s_ss)      \
  V(I16x8ExtMulHighI8x16U, 0xfd9f, s_ss)     \
  V(I32x4Abs, 0xfda0, s_s)                   \
  V(I32x4Neg, 0xfda1, s_s)                   \
  V(I32x4AllTrue, 0xfda3, i_s)               \
  V(I32x4BitMask, 0xfda4, i_s)               \
  V(I32x4SConvertI16x8Low, 0xfda7, s_s)      \
  V(I32x4SConvertI16x8High, 0xfda8, s_s)     \
  V(I32x4UConvertI16x8Low, 0xfda9, s_s)      \
  V(I32x4UConvertI16x8High, 0xfdaa, s_s)     \
  V(I32x4Shl, 0xfdab, s_si)                  \
  V(I32x4ShrS, 0xfdac, s_si)                 \
  V(I32x4ShrU, 0xfdad, s_si)                 \
  V(I32x4Add, 0xfdae, s_ss)                  \
  V(I32x4Sub, 0xfdb1, s_ss)                  \
  V(I32x4Mul, 0xfdb5, s_ss)                  \
  V(I32x4MinS, 0xfdb6, s_ss)                 \
  V(I32x4MinU, 0xfdb7, s_ss)                 \
  V(I32x4MaxS, 0xfdb8, s_ss)                 \
  V(I32x4MaxU, 0xfdb9, s_ss)                 \
  V(I32x4DotI16x8S, 0xfdba, s_ss)            \
  V(I32x4ExtMulLowI16x8S, 0xfdbc, s_ss)      \
  V(I32x4ExtMulHighI16x8S, 0xfdbd, s_ss)     \
  V(I32x4ExtMulLowI16x8U, 0xfdbe, s_ss)      \
  V(I32x4ExtMulHighI16x8U, 0xfdbf, s_ss)     \
  V(I64x2Abs, 0xfdc0, s_s)                   \
  V(I64x2Neg, 0xfdc1, s_s)                   \
  V(I64x2AllTrue, 0xfdc3, i_s)               \
  V(I64x2BitMask, 0xfdc4, i_s)               \
  V(I64x2SConvertI32x4Low, 0xfdc7, s_s)      \
  V(I64x2SConvertI32x4High, 0xfdc8, s_s)     \
  V(I64x2UConvertI32x4Low, 0xfdc9, s_s)      \
  V(I64x2UConvertI32x4High, 0xfdca, s_s)     \
  V(I64x2Shl, 0xfdcb, s_si)                  \
  V(I64x2ShrS, 0xfdcc, s_si)                 \
  V(I64x2ShrU, 0xfdcd, s_si)                 \
  V(I64x2Add, 0xfdce, s_ss)                  \
  V(I64x2Sub, 0xfdd1, s_ss)                  \
  V(I64x2Mul, 0xfdd5, s_ss)                  \
  V(I64x2Eq, 0xfdd6, s_ss)                   \
  V(I64x2Ne, 0xfdd7, s_ss)                   \
  V(I64x2LtS, 0xfdd8, s_ss)                  \
  V(I64x2GtS, 0xfdd9, s_ss)                  \
  V(I64x2LeS, 0xfdda, s_ss)                  \
  V(I64x2GeS, 0xfddb, s_ss)                  \
  V(I64x2ExtMulLowI32x4S, 0xfddc, s_ss)      \
  V(I64x2ExtMulHighI32x4S, 0xfddd, s_ss)     \
  V(I64x2ExtMulLowI32x4U, 0xfdde, s_ss)      \
  V(I64x2ExtMulHighI32x4U, 0xfddf, s_ss)     \
  V(F32x4Abs, 0xfde0, s_s)                   \
  V(F32x4Neg, 0xfde1, s_s)                   \
  V(F32x4Sqrt, 0xfde3, s_s)                  \
  V(F32x4Add, 0xfde4, s_ss)                  \
  V(F32x4Sub, 0xfde5, s_ss)                  \
  V(F32x4Mul, 0xfde6, s_ss)                  \
  V(F32x4Div, 0xfde7, s_ss)                  \
  V(F32x4Min, 0xfde8, s_ss)                  \
  V(F32x4Max, 0xfde9, s_ss)                  \
  V(F32x4Pmin, 0xfdea, s_ss)                 \
  V(F32x4Pmax, 0xfdeb, s_ss)                 \
  V(F64x2Abs, 0xfdec, s_s)                   \
  V(F64x2Neg, 0xfded, s_s)                   \
  V(F64x2Sqrt, 0xfdef, s_s)                  \
  V(F64x2Add, 0xfdf0, s_ss)                  \
  V(F64x2Sub, 0xfdf1, s_ss)                  \
  V(F64x2Mul, 0xfdf2, s_ss)                  \
  V(F64x2Div, 0xfdf3, s_ss)                  \
  V(F64x2Min, 0xfdf4, s_ss)                  \
  V(F64x2Max, 0xfdf5, s_ss)                  \
  V(F64x2Pmin, 0xfdf6, s_ss)                 \
  V(F64x2Pmax, 0xfdf7, s_ss)                 \
  V(I32x4SConvertF32x4, 0xfdf8, s_s)         \
  V(I32x4UConvertF32x4, 0xfdf9, s_s)         \
  V(F32x4SConvertI32x4, 0xfdfa, s_s)         \
  V(F32x4UConvertI32x4, 0xfdfb, s_s)         \
  V(I32x4TruncSatF64x2SZero, 0xfdfc, s_s)    \
  V(I32x4TruncSatF64x2UZero, 0xfdfd, s_s)    \
  V(F64x2ConvertLowI32x4S, 0xfdfe, s_s)      \
  V(F64x2ConvertLowI32x4U, 0xfdff, s_s)
525
// Relaxed-SIMD proposal opcodes: results may be implementation-defined
// within relaxed bounds. They fill numbering gaps in the MVP table above,
// so entries here are grouped by operation rather than sorted numerically.
#define FOREACH_RELAXED_SIMD_OPCODE(V)        \
  V(I8x16RelaxedSwizzle, 0xfda2, s_ss)        \
  V(I8x16RelaxedLaneSelect, 0xfdb2, s_sss)    \
  V(I16x8RelaxedLaneSelect, 0xfdb3, s_sss)    \
  V(I32x4RelaxedLaneSelect, 0xfdd2, s_sss)    \
  V(I64x2RelaxedLaneSelect, 0xfdd3, s_sss)    \
  V(F32x4Qfma, 0xfdaf, s_sss)                 \
  V(F32x4Qfms, 0xfdb0, s_sss)                 \
  V(F64x2Qfma, 0xfdcf, s_sss)                 \
  V(F64x2Qfms, 0xfdd0, s_sss)                 \
  V(F32x4RelaxedMin, 0xfdb4, s_ss)            \
  V(F32x4RelaxedMax, 0xfde2, s_ss)            \
  V(F64x2RelaxedMin, 0xfdd4, s_ss)            \
  V(F64x2RelaxedMax, 0xfdee, s_ss)            \
  V(I32x4RelaxedTruncF32x4S, 0xfda5, s_s)     \
  V(I32x4RelaxedTruncF32x4U, 0xfda6, s_s)     \
  V(I32x4RelaxedTruncF64x2SZero, 0xfdc5, s_s) \
  V(I32x4RelaxedTruncF64x2UZero, 0xfdc6, s_s) \
  V(F32x4RecipApprox, 0xfdbb, s_s)            \
  V(F32x4RecipSqrtApprox, 0xfdc2, s_s)
546
// Lane extraction: one lane-index immediate, one s128 stack input. The
// result scalar type depends on the shape, hence '_' instead of a fixed sig.
#define FOREACH_SIMD_1_OPERAND_1_PARAM_OPCODE(V) \
  V(I8x16ExtractLaneS, 0xfd15, _)                \
  V(I8x16ExtractLaneU, 0xfd16, _)                \
  V(I16x8ExtractLaneS, 0xfd18, _)                \
  V(I16x8ExtractLaneU, 0xfd19, _)                \
  V(I32x4ExtractLane, 0xfd1b, _)                 \
  V(I64x2ExtractLane, 0xfd1d, _)                 \
  V(F32x4ExtractLane, 0xfd1f, _)                 \
  V(F64x2ExtractLane, 0xfd21, _)

// Lane replacement: one lane-index immediate, two stack inputs
// (vector + replacement scalar).
#define FOREACH_SIMD_1_OPERAND_2_PARAM_OPCODE(V) \
  V(I8x16ReplaceLane, 0xfd17, _)                 \
  V(I16x8ReplaceLane, 0xfd1a, _)                 \
  V(I32x4ReplaceLane, 0xfd1c, _)                 \
  V(I64x2ReplaceLane, 0xfd1e, _)                 \
  V(F32x4ReplaceLane, 0xfd20, _)                 \
  V(F64x2ReplaceLane, 0xfd22, _)
564
// Aggregators combining the SIMD sub-tables above.
#define FOREACH_SIMD_0_OPERAND_OPCODE(V) \
  FOREACH_SIMD_MVP_0_OPERAND_OPCODE(V)   \
  FOREACH_RELAXED_SIMD_OPCODE(V)

#define FOREACH_SIMD_1_OPERAND_OPCODE(V)   \
  FOREACH_SIMD_1_OPERAND_1_PARAM_OPCODE(V) \
  FOREACH_SIMD_1_OPERAND_2_PARAM_OPCODE(V)

// Every 0xfd-prefixed opcode.
#define FOREACH_SIMD_OPCODE(V)         \
  FOREACH_SIMD_0_OPERAND_OPCODE(V)     \
  FOREACH_SIMD_1_OPERAND_OPCODE(V)     \
  FOREACH_SIMD_MASK_OPERAND_OPCODE(V)  \
  FOREACH_SIMD_MEM_OPCODE(V)           \
  FOREACH_SIMD_MEM_1_OPERAND_OPCODE(V) \
  FOREACH_SIMD_CONST_OPCODE(V)
580
// 0xfc-prefixed opcodes (sat-conversions + bulk memory/table ops). Takes two
// callbacks: V_SIG for opcodes with one fixed signature, V_VARIADIC for
// opcodes whose signature varies (e.g. with memory64 or the table's type).
#define FOREACH_NUMERIC_OPCODE(V_SIG, V_VARIADIC)         \
  V_SIG(I32SConvertSatF32, 0xfc00, i_f)                   \
  V_SIG(I32UConvertSatF32, 0xfc01, i_f)                   \
  V_SIG(I32SConvertSatF64, 0xfc02, i_d)                   \
  V_SIG(I32UConvertSatF64, 0xfc03, i_d)                   \
  V_SIG(I64SConvertSatF32, 0xfc04, l_f)                   \
  V_SIG(I64UConvertSatF32, 0xfc05, l_f)                   \
  V_SIG(I64SConvertSatF64, 0xfc06, l_d)                   \
  V_SIG(I64UConvertSatF64, 0xfc07, l_d)                   \
  V_VARIADIC(MemoryInit, 0xfc08)                          \
  V_SIG(DataDrop, 0xfc09, v_v)                            \
  V_VARIADIC(MemoryCopy, 0xfc0a)                          \
  V_VARIADIC(MemoryFill, 0xfc0b)                          \
  V_SIG(TableInit, 0xfc0c, v_iii)                         \
  V_SIG(ElemDrop, 0xfc0d, v_v)                            \
  V_SIG(TableCopy, 0xfc0e, v_iii)                         \
  /* TableGrow is polymorphic in the first parameter. */  \
  /* It's whatever the table type is. */                  \
  V_VARIADIC(TableGrow, 0xfc0f)                           \
  V_SIG(TableSize, 0xfc10, i_v)                           \
  /* TableFill is polymorphic in the second parameter. */ \
  /* It's whatever the table type is. */                  \
  V_VARIADIC(TableFill, 0xfc11)
604
// Threads-proposal atomic opcodes (0xfe prefix). All take an i32 address as
// first parameter; RMW ops return the old value. 8U/16U/32U variants operate
// on a zero-extended narrow memory slot.
#define FOREACH_ATOMIC_OPCODE(V)                \
  V(AtomicNotify, 0xfe00, i_ii)                 \
  V(I32AtomicWait, 0xfe01, i_iil)               \
  V(I64AtomicWait, 0xfe02, i_ill)               \
  V(I32AtomicLoad, 0xfe10, i_i)                 \
  V(I64AtomicLoad, 0xfe11, l_i)                 \
  V(I32AtomicLoad8U, 0xfe12, i_i)               \
  V(I32AtomicLoad16U, 0xfe13, i_i)              \
  V(I64AtomicLoad8U, 0xfe14, l_i)               \
  V(I64AtomicLoad16U, 0xfe15, l_i)              \
  V(I64AtomicLoad32U, 0xfe16, l_i)              \
  V(I32AtomicStore, 0xfe17, v_ii)               \
  V(I64AtomicStore, 0xfe18, v_il)               \
  V(I32AtomicStore8U, 0xfe19, v_ii)             \
  V(I32AtomicStore16U, 0xfe1a, v_ii)            \
  V(I64AtomicStore8U, 0xfe1b, v_il)             \
  V(I64AtomicStore16U, 0xfe1c, v_il)            \
  V(I64AtomicStore32U, 0xfe1d, v_il)            \
  V(I32AtomicAdd, 0xfe1e, i_ii)                 \
  V(I64AtomicAdd, 0xfe1f, l_il)                 \
  V(I32AtomicAdd8U, 0xfe20, i_ii)               \
  V(I32AtomicAdd16U, 0xfe21, i_ii)              \
  V(I64AtomicAdd8U, 0xfe22, l_il)               \
  V(I64AtomicAdd16U, 0xfe23, l_il)              \
  V(I64AtomicAdd32U, 0xfe24, l_il)              \
  V(I32AtomicSub, 0xfe25, i_ii)                 \
  V(I64AtomicSub, 0xfe26, l_il)                 \
  V(I32AtomicSub8U, 0xfe27, i_ii)               \
  V(I32AtomicSub16U, 0xfe28, i_ii)              \
  V(I64AtomicSub8U, 0xfe29, l_il)               \
  V(I64AtomicSub16U, 0xfe2a, l_il)              \
  V(I64AtomicSub32U, 0xfe2b, l_il)              \
  V(I32AtomicAnd, 0xfe2c, i_ii)                 \
  V(I64AtomicAnd, 0xfe2d, l_il)                 \
  V(I32AtomicAnd8U, 0xfe2e, i_ii)               \
  V(I32AtomicAnd16U, 0xfe2f, i_ii)              \
  V(I64AtomicAnd8U, 0xfe30, l_il)               \
  V(I64AtomicAnd16U, 0xfe31, l_il)              \
  V(I64AtomicAnd32U, 0xfe32, l_il)              \
  V(I32AtomicOr, 0xfe33, i_ii)                  \
  V(I64AtomicOr, 0xfe34, l_il)                  \
  V(I32AtomicOr8U, 0xfe35, i_ii)                \
  V(I32AtomicOr16U, 0xfe36, i_ii)               \
  V(I64AtomicOr8U, 0xfe37, l_il)                \
  V(I64AtomicOr16U, 0xfe38, l_il)               \
  V(I64AtomicOr32U, 0xfe39, l_il)               \
  V(I32AtomicXor, 0xfe3a, i_ii)                 \
  V(I64AtomicXor, 0xfe3b, l_il)                 \
  V(I32AtomicXor8U, 0xfe3c, i_ii)               \
  V(I32AtomicXor16U, 0xfe3d, i_ii)              \
  V(I64AtomicXor8U, 0xfe3e, l_il)               \
  V(I64AtomicXor16U, 0xfe3f, l_il)              \
  V(I64AtomicXor32U, 0xfe40, l_il)              \
  V(I32AtomicExchange, 0xfe41, i_ii)            \
  V(I64AtomicExchange, 0xfe42, l_il)            \
  V(I32AtomicExchange8U, 0xfe43, i_ii)          \
  V(I32AtomicExchange16U, 0xfe44, i_ii)         \
  V(I64AtomicExchange8U, 0xfe45, l_il)          \
  V(I64AtomicExchange16U, 0xfe46, l_il)         \
  V(I64AtomicExchange32U, 0xfe47, l_il)         \
  V(I32AtomicCompareExchange, 0xfe48, i_iii)    \
  V(I64AtomicCompareExchange, 0xfe49, l_ill)    \
  V(I32AtomicCompareExchange8U, 0xfe4a, i_iii)  \
  V(I32AtomicCompareExchange16U, 0xfe4b, i_iii) \
  V(I64AtomicCompareExchange8U, 0xfe4c, l_ill)  \
  V(I64AtomicCompareExchange16U, 0xfe4d, l_ill) \
  V(I64AtomicCompareExchange32U, 0xfe4e, l_ill)
672
// Atomic opcode without a memory-access immediate, kept separate from the
// table above for that reason.
#define FOREACH_ATOMIC_0_OPERAND_OPCODE(V)                      \
  /* AtomicFence does not target a particular linear memory. */ \
  V(AtomicFence, 0xfe03, v_v)
676
// Opcodes of the GC proposal (struct/array/i31/rtt operations), all carried
// under the 0xfb prefix byte (see FOREACH_PREFIX). The third argument is `_`
// rather than a FOREACH_SIGNATURE entry: these opcodes have no fixed
// FunctionSig here — presumably their signatures depend on type-index
// immediates and are derived during decoding (confirm in the decoder).
// Note some entries are intentionally not in numeric opcode order (e.g.
// 0xfb1e before 0xfb1d, 0xfb66 between 0xfb62 and 0xfb63).
#define FOREACH_GC_OPCODE(V) /*              Force 80 columns               */ \
  V(StructNewWithRtt, 0xfb01, _)                                               \
  V(StructNewDefaultWithRtt, 0xfb02, _)                                        \
  V(StructGet, 0xfb03, _)                                                      \
  V(StructGetS, 0xfb04, _)                                                     \
  V(StructGetU, 0xfb05, _)                                                     \
  V(StructSet, 0xfb06, _)                                                      \
  V(StructNew, 0xfb07, _)                                                      \
  V(StructNewDefault, 0xfb08, _)                                               \
  V(ArrayNewWithRtt, 0xfb11, _)                                                \
  V(ArrayNewDefaultWithRtt, 0xfb12, _)                                         \
  V(ArrayGet, 0xfb13, _)                                                       \
  V(ArrayGetS, 0xfb14, _)                                                      \
  V(ArrayGetU, 0xfb15, _)                                                      \
  V(ArraySet, 0xfb16, _)                                                       \
  V(ArrayLen, 0xfb17, _)                                                       \
  V(ArrayCopy, 0xfb18, _)       /* not standardized - V8 experimental */       \
  V(ArrayInit, 0xfb19, _)       /* not standardized - V8 experimental */       \
  V(ArrayInitStatic, 0xfb1a, _) /* not standardized - V8 experimental */       \
  V(ArrayNew, 0xfb1b, _)                                                       \
  V(ArrayNewDefault, 0xfb1c, _)                                                \
  V(ArrayInitFromData, 0xfb1e, _)       /* not stand. - V8 experimental */     \
  V(ArrayInitFromDataStatic, 0xfb1d, _) /* not stand. - V8 experimental */     \
  V(I31New, 0xfb20, _)                                                         \
  V(I31GetS, 0xfb21, _)                                                        \
  V(I31GetU, 0xfb22, _)                                                        \
  V(RttCanon, 0xfb30, _)                                                       \
  V(RefTest, 0xfb40, _)                                                        \
  V(RefCast, 0xfb41, _)                                                        \
  V(BrOnCast, 0xfb42, _)                                                       \
  V(BrOnCastFail, 0xfb43, _)                                                   \
  V(RefTestStatic, 0xfb44, _)                                                  \
  V(RefCastStatic, 0xfb45, _)                                                  \
  V(BrOnCastStatic, 0xfb46, _)                                                 \
  V(BrOnCastStaticFail, 0xfb47, _)                                             \
  V(RefIsFunc, 0xfb50, _)                                                      \
  V(RefIsData, 0xfb51, _)                                                      \
  V(RefIsI31, 0xfb52, _)                                                       \
  V(RefIsArray, 0xfb53, _)                                                     \
  V(RefAsFunc, 0xfb58, _)                                                      \
  V(RefAsData, 0xfb59, _)                                                      \
  V(RefAsI31, 0xfb5a, _)                                                       \
  V(RefAsArray, 0xfb5b, _)                                                     \
  V(BrOnFunc, 0xfb60, _)                                                       \
  V(BrOnData, 0xfb61, _)                                                       \
  V(BrOnI31, 0xfb62, _)                                                        \
  V(BrOnArray, 0xfb66, _)                                                      \
  V(BrOnNonFunc, 0xfb63, _)                                                    \
  V(BrOnNonData, 0xfb64, _)                                                    \
  V(BrOnNonI31, 0xfb65, _)                                                     \
  V(BrOnNonArray, 0xfb67, _)
728
// All opcodes. Applies the visitor V to every (name, opcode, signature)
// entry of every per-category table. The expansion order below is
// load-bearing for anything generated from it (e.g. enumerator declaration
// order), so do not reorder the sub-tables casually.
// FOREACH_NUMERIC_OPCODE expects two visitor arguments; both are bound to V
// here so all numeric entries are visited uniformly.
#define FOREACH_OPCODE(V)            \
  FOREACH_CONTROL_OPCODE(V)          \
  FOREACH_MISC_OPCODE(V)             \
  FOREACH_SIMPLE_OPCODE(V)           \
  FOREACH_SIMPLE_PROTOTYPE_OPCODE(V) \
  FOREACH_STORE_MEM_OPCODE(V)        \
  FOREACH_LOAD_MEM_OPCODE(V)         \
  FOREACH_MISC_MEM_OPCODE(V)         \
  FOREACH_ASMJS_COMPAT_OPCODE(V)     \
  FOREACH_SIMD_OPCODE(V)             \
  FOREACH_ATOMIC_OPCODE(V)           \
  FOREACH_ATOMIC_0_OPERAND_OPCODE(V) \
  FOREACH_NUMERIC_OPCODE(V, V)       \
  FOREACH_GC_OPCODE(V)
744
// All signatures. Each entry is V(name, return_type, param_types...), and the
// name encodes the signature as <return>_<params> using one letter per type:
//   v = kWasmVoid, i = kWasmI32, l = kWasmI64, f = kWasmF32, d = kWasmF64,
//   a = kWasmAnyRef, c = kWasmFuncRef, q = kWasmEqRef
// (plus s = kWasmS128 in FOREACH_SIMD_SIGNATURE). E.g. i_ll: returns i32,
// takes two i64 parameters.
#define FOREACH_SIGNATURE(V)                        \
  FOREACH_SIMD_SIGNATURE(V)                         \
  V(v_v, kWasmVoid)                                 \
  V(i_ii, kWasmI32, kWasmI32, kWasmI32)             \
  V(i_i, kWasmI32, kWasmI32)                        \
  V(i_v, kWasmI32)                                  \
  V(i_ff, kWasmI32, kWasmF32, kWasmF32)             \
  V(i_f, kWasmI32, kWasmF32)                        \
  V(i_dd, kWasmI32, kWasmF64, kWasmF64)             \
  V(i_d, kWasmI32, kWasmF64)                        \
  V(i_l, kWasmI32, kWasmI64)                        \
  V(l_ll, kWasmI64, kWasmI64, kWasmI64)             \
  V(i_ll, kWasmI32, kWasmI64, kWasmI64)             \
  V(l_l, kWasmI64, kWasmI64)                        \
  V(l_i, kWasmI64, kWasmI32)                        \
  V(l_f, kWasmI64, kWasmF32)                        \
  V(l_d, kWasmI64, kWasmF64)                        \
  V(f_ff, kWasmF32, kWasmF32, kWasmF32)             \
  V(f_f, kWasmF32, kWasmF32)                        \
  V(f_d, kWasmF32, kWasmF64)                        \
  V(f_i, kWasmF32, kWasmI32)                        \
  V(f_l, kWasmF32, kWasmI64)                        \
  V(d_dd, kWasmF64, kWasmF64, kWasmF64)             \
  V(d_d, kWasmF64, kWasmF64)                        \
  V(d_f, kWasmF64, kWasmF32)                        \
  V(d_i, kWasmF64, kWasmI32)                        \
  V(d_l, kWasmF64, kWasmI64)                        \
  V(v_i, kWasmVoid, kWasmI32)                       \
  V(v_ii, kWasmVoid, kWasmI32, kWasmI32)            \
  V(v_id, kWasmVoid, kWasmI32, kWasmF64)            \
  V(d_id, kWasmF64, kWasmI32, kWasmF64)             \
  V(v_if, kWasmVoid, kWasmI32, kWasmF32)            \
  V(f_if, kWasmF32, kWasmI32, kWasmF32)             \
  V(v_il, kWasmVoid, kWasmI32, kWasmI64)            \
  V(l_il, kWasmI64, kWasmI32, kWasmI64)             \
  V(v_iii, kWasmVoid, kWasmI32, kWasmI32, kWasmI32) \
  V(i_iii, kWasmI32, kWasmI32, kWasmI32, kWasmI32)  \
  V(l_ill, kWasmI64, kWasmI32, kWasmI64, kWasmI64)  \
  V(i_iil, kWasmI32, kWasmI32, kWasmI32, kWasmI64)  \
  V(i_ill, kWasmI32, kWasmI32, kWasmI64, kWasmI64)  \
  V(i_a, kWasmI32, kWasmAnyRef)                     \
  V(i_ci, kWasmI32, kWasmFuncRef, kWasmI32)         \
  V(i_qq, kWasmI32, kWasmEqRef, kWasmEqRef)
789
// Signatures involving the s128 SIMD type (letter code: s = kWasmS128).
// Same V(name, return_type, param_types...) shape and naming convention as
// FOREACH_SIGNATURE, which includes this table.
#define FOREACH_SIMD_SIGNATURE(V)                      \
  V(s_s, kWasmS128, kWasmS128)                         \
  V(s_f, kWasmS128, kWasmF32)                          \
  V(s_d, kWasmS128, kWasmF64)                          \
  V(s_ss, kWasmS128, kWasmS128, kWasmS128)             \
  V(s_i, kWasmS128, kWasmI32)                          \
  V(s_l, kWasmS128, kWasmI64)                          \
  V(s_si, kWasmS128, kWasmS128, kWasmI32)              \
  V(i_s, kWasmI32, kWasmS128)                          \
  V(v_is, kWasmVoid, kWasmI32, kWasmS128)              \
  V(s_sss, kWasmS128, kWasmS128, kWasmS128, kWasmS128) \
  V(s_is, kWasmS128, kWasmI32, kWasmS128)
802
// Prefix bytes for multi-byte opcode encodings: the leading byte of each
// prefixed opcode above (e.g. 0xfe2e for I32AtomicAnd8U uses the Atomic
// prefix 0xfe). Expanded into k<Name>Prefix enumerators in WasmOpcode.
#define FOREACH_PREFIX(V) \
  V(GC, 0xfb)             \
  V(Numeric, 0xfc)        \
  V(Simd, 0xfd)           \
  V(Atomic, 0xfe)
808
809enum WasmOpcode {
810// Declare expression opcodes.
811#define DECLARE_NAMED_ENUM(name, opcode, ...) kExpr##name = opcode,
812  FOREACH_OPCODE(DECLARE_NAMED_ENUM)
813#undef DECLARE_NAMED_ENUM
814#define DECLARE_PREFIX(name, opcode) k##name##Prefix = opcode,
815      FOREACH_PREFIX(DECLARE_PREFIX)
816#undef DECLARE_PREFIX
817};
818
819enum TrapReason {
820#define DECLARE_ENUM(name) k##name,
821  FOREACH_WASM_TRAPREASON(DECLARE_ENUM)
822  kTrapCount
823#undef DECLARE_ENUM
824};
825
// A collection of opcode-related static methods.
class V8_EXPORT_PRIVATE WasmOpcodes {
 public:
  // Returns a human-readable name for the opcode (e.g. for disassembly
  // and error messages).
  static constexpr const char* OpcodeName(WasmOpcode);
  // Returns the fixed FunctionSig for the opcode, per the signature column
  // of the opcode tables above. NOTE(review): presumably nullptr for
  // opcodes whose table entry is `_` — confirm against the definition.
  static constexpr const FunctionSig* Signature(WasmOpcode);
  // Signature used for the asm.js-compatibility variants of the opcode
  // (see FOREACH_ASMJS_COMPAT_OPCODE).
  static constexpr const FunctionSig* AsmjsSignature(WasmOpcode);
  // True if the opcode is one of the FOREACH_PREFIX bytes (0xfb-0xfe).
  static constexpr bool IsPrefixOpcode(WasmOpcode);
  // True for control-flow opcodes (block/loop/br/... family).
  static constexpr bool IsControlOpcode(WasmOpcode);
  static constexpr bool IsExternRefOpcode(WasmOpcode);
  // True for opcodes that can throw (e.g. throw/rethrow).
  static constexpr bool IsThrowingOpcode(WasmOpcode);
  static constexpr bool IsRelaxedSimdOpcode(WasmOpcode);
  // Check whether the given opcode always jumps, i.e. all instructions after
  // this one in the current block are dead. Returns false for |end|.
  static constexpr bool IsUnconditionalJump(WasmOpcode);
  static constexpr bool IsBreakable(WasmOpcode);

  // Maps a TrapReason to the MessageTemplate used to report it to JS.
  static constexpr MessageTemplate TrapReasonToMessageId(TrapReason);
  // Human-readable message text for the trap reason.
  static inline const char* TrapReasonMessage(TrapReason);
};
845
846}  // namespace wasm
847}  // namespace internal
848}  // namespace v8
849
850#endif  // V8_WASM_WASM_OPCODES_H_
851