Lines matching refs: src

57 void AssemblerX64::Addq(Immediate src, Register dst)
60 if (InRange8(src.Value())) {
65 EmitI8(static_cast<int8_t>(src.Value()));
69 EmitI32(src.Value());
75 EmitI32(src.Value());
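
The fragments at 57-75 outline the immediate-form pattern that repeats below for Addl, Subq/Subl, Cmpq/Cmpl, Andq/Andl and Or: a sign-extended imm8 encoding when InRange8() holds, otherwise a 32-bit immediate (the second EmitI32 line in each function plausibly being an accumulator shortcut when dst is rax). A minimal sketch of that shape, not taken verbatim from the file; EmitU8, the one-register EmitRexPrefix, the digit form of EmitModrm and the rax constant are assumed helpers, and the opcodes are the standard x86-64 group-1 encodings:

    void AssemblerX64::Addq(Immediate src, Register dst)
    {
        // Sketch only: helper signatures are assumptions, opcodes are standard x86-64.
        EmitRexPrefix(dst);                               // REX.W for 64-bit operands
        if (InRange8(src.Value())) {
            EmitU8(0x83);                                 // ADD r/m64, imm8 (sign-extended)
            EmitModrm(0, dst);                            // /0 selects ADD
            EmitI8(static_cast<int8_t>(src.Value()));
            return;
        }
        if (dst == rax) {
            EmitU8(0x05);                                 // ADD rax, imm32 shortcut (assumed)
            EmitI32(src.Value());
            return;
        }
        EmitU8(0x81);                                     // ADD r/m64, imm32
        EmitModrm(0, dst);
        EmitI32(src.Value());
    }
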
79 void AssemblerX64::Addq(Register src, Register dst)
81 EmitRexPrefix(dst, src);
84 EmitModrm(dst, src);
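
The register-register forms reduce to a REX prefix, one opcode byte and a ModRM byte. Addq passes (dst, src), which pairs with the load-direction opcode 0x03; Subq, Cmpq, Orq, And and Movq below pass (src, dst), pairing with the store-direction opcodes 0x29/0x39/0x09/0x21/0x89. A sketch of Addq under the assumption that EmitModrm(a, b) places a in the reg field and b in r/m, with EmitU8 assumed:

    void AssemblerX64::Addq(Register src, Register dst)
    {
        EmitRexPrefix(dst, src);
        EmitU8(0x03);            // ADD r64, r/m64 (assumed opcode; dst in reg, src in r/m)
        EmitModrm(dst, src);
    }
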
87 void AssemblerX64::Addl(Immediate src, Register dst)
90 if (InRange8(src.Value())) {
95 EmitI8(static_cast<int8_t>(src.Value()));
99 EmitI32(src.Value());
105 EmitI32(src.Value());
109 void AssemblerX64::Subq(Immediate src, Register dst)
112 if (InRange8(src.Value())) {
117 EmitI8(static_cast<int8_t>(src.Value()));
121 EmitI32(src.Value());
127 EmitI32(src.Value());
131 void AssemblerX64::Subq(Register src, Register dst)
133 EmitRexPrefix(src, dst);
136 EmitModrm(src, dst);
139 void AssemblerX64::Subl(Immediate src, Register dst)
142 if (InRange8(src.Value())) {
147 EmitI8(static_cast<int8_t>(src.Value()));
151 EmitI32(src.Value());
157 EmitI32(src.Value());
161 void AssemblerX64::Cmpq(Immediate src, Register dst)
164 if (InRange8(src.Value())) {
169 EmitI8(static_cast<int8_t>(src.Value()));
173 EmitI32(src.Value());
179 EmitI32(src.Value());
183 void AssemblerX64::Cmpb(Immediate src, Register dst)
186 if (InRange8(src.Value())) {
191 EmitI8(static_cast<int8_t>(src.Value()));
195 EmitI8(src.Value());
202 void AssemblerX64::Cmpq(Register src, Register dst)
204 EmitRexPrefix(src, dst);
207 EmitModrm(src, dst);
210 void AssemblerX64::Cmpl(Immediate src, Register dst)
213 if (InRange8(src.Value())) {
218 EmitI8(static_cast<int8_t>(src.Value()));
222 EmitI32(src.Value());
228 EmitI32(src.Value());
232 void AssemblerX64::Cmp(Immediate src, Register dst)
234 Cmpq(src, dst);
237 void AssemblerX64::Movq(Register src, Register dst)
239 EmitRexPrefix(src, dst);
242 EmitModrm(src, dst);
245 void AssemblerX64::Mov(Register src, Register dst)
247 EmitRexPrefixl(dst, src);
250 EmitModrm(dst, src);
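
Movq keeps the 64-bit REX.W path with src first (the 0x89 store form), while Mov uses the "l" REX helper, which would drop REX.W for a 32-bit move, and reverses the operands (the 0x8B load form). A hedged sketch of both; the opcode bytes and EmitU8 are assumptions:

    void AssemblerX64::Movq(Register src, Register dst)
    {
        EmitRexPrefix(src, dst);
        EmitU8(0x89);            // MOV r/m64, r64 (assumed; src in reg field)
        EmitModrm(src, dst);
    }

    void AssemblerX64::Mov(Register src, Register dst)
    {
        EmitRexPrefixl(dst, src);
        EmitU8(0x8B);            // MOV r32, r/m32 (assumed; dst in reg field)
        EmitModrm(dst, src);
    }
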
265 void AssemblerX64::Movq(const Operand &src, Register dst)
267 EmitRexPrefix(dst, src);
270 EmitOperand(dst, src);
273 void AssemblerX64::Movq(Register src, const Operand &dst)
275 EmitRexPrefix(src, dst);
278 EmitOperand(src, dst);
281 void AssemblerX64::Movq(Immediate src, Operand dst)
288 EmitI32(src.Value());
291 void AssemblerX64::Movq(Immediate src, Register dst)
296 EmitI32(src.Value());
299 void AssemblerX64::Mov(const Operand &src, Register dst)
301 Movq(src, dst);
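
The Operand overloads at 265-278 have the same shape but delegate the ModRM/SIB/displacement bytes to EmitOperand, and Mov(const Operand &, Register) at 299 simply forwards to Movq. The two immediate overloads each end in EmitI32, which matches the C7 /0 encoding: a 32-bit immediate that the CPU sign-extends to 64 bits under REX.W. A sketch of the register-destination case, with EmitU8 and the digit-form EmitModrm assumed:

    void AssemblerX64::Movq(Immediate src, Register dst)
    {
        EmitRexPrefix(dst);                 // REX.W
        EmitU8(0xC7);                       // MOV r/m64, imm32 (sign-extended)
        EmitModrm(0, dst);                  // /0
        EmitI32(src.Value());
    }
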
853 void AssemblerX64::Movl(Register src, Register dst)
855 EmitRexPrefixl(src, dst);
858 EmitModrm(src, dst);
861 void AssemblerX64::Movl(const Operand &src, Register dst)
863 EmitRexPrefixl(dst, src);
866 EmitOperand(dst, src);
869 void AssemblerX64::Movl(Register src, const Operand& dst)
871 EmitRexPrefixl(src, dst);
874 EmitOperand(src, dst);
877 void AssemblerX64::Testq(Immediate src, Register dst)
879 if (InRange8(src.Value())) {
880 Testb(src, dst);
884 EmitI32(src.Value());
891 EmitI32(src.Value());
895 void AssemblerX64::Testb(Immediate src, Register dst)
897 ASSERT(InRange8(src.Value()));
912 EmitI8(static_cast<int8_t>(src.Value()));
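
Testq folds small immediates into the byte form by calling Testb (line 880) and otherwise emits a 32-bit immediate; Testb itself asserts InRange8 and finishes with a single imm8. A sketch of Testq assuming the standard TEST encodings (REX.W + F7 /0 id, with A9 as an accumulator shortcut) and the hypothetical EmitU8 / digit-form EmitModrm helpers:

    void AssemblerX64::Testq(Immediate src, Register dst)
    {
        if (InRange8(src.Value())) {
            Testb(src, dst);                 // reuse the imm8 encoding
            return;
        }
        EmitRexPrefix(dst);                  // REX.W
        if (dst == rax) {                    // assumed accumulator shortcut
            EmitU8(0xA9);                    // TEST rax, imm32
        } else {
            EmitU8(0xF7);                    // TEST r/m64, imm32
            EmitModrm(0, dst);               // /0
        }
        EmitI32(src.Value());
    }
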
947 void AssemblerX64::Cmpl(Register src, Register dst)
949 EmitRexPrefixl(src, dst);
952 EmitModrm(src, dst);
987 void AssemblerX64::CMovbe(Register src, Register dst)
989 EmitRexPrefixl(dst, src);
993 EmitModrm(dst, src);
996 void AssemblerX64::Leaq(const Operand &src, Register dst)
998 EmitRexPrefix(dst, src);
1001 EmitOperand(dst, src);
1004 void AssemblerX64::Leal(const Operand &src, Register dst)
1006 EmitRexPrefixl(dst, src);
1009 EmitOperand(dst, src);
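
Leaq and Leal differ only in the REX helper; the address itself is encoded by EmitOperand, which has to produce the ModRM, SIB and displacement bytes for the Operand. A sketch of Leaq, with the single 0x8D opcode byte and EmitU8 assumed:

    void AssemblerX64::Leaq(const Operand &src, Register dst)
    {
        EmitRexPrefix(dst, src);     // REX.W plus extension bits taken from the operand
        EmitU8(0x8D);                // LEA r64, m
        EmitOperand(dst, src);       // ModRM/SIB/displacement for src
    }
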
1012 void AssemblerX64::Shrq(Immediate src, Register dst)
1019 EmitI8(static_cast<int8_t>(src.Value()));
1022 void AssemblerX64::Shrl(Immediate src, Register dst)
1029 EmitI8(static_cast<int8_t>(src.Value()));
1032 void AssemblerX64::Shr(Immediate src, Register dst)
1034 Shrq(src, dst);
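
The shift emitters (Shrq/Shrl here, Shll/Shlq at 1338/1348) all end in a single EmitI8 for the count, which matches the group-2 C1 /digit ib encoding: /5 for SHR, /4 for SHL, REX.W only on the q forms, and Shr is another width alias onto Shrq. A sketch of Shrq with EmitU8 and the digit-form EmitModrm assumed:

    void AssemblerX64::Shrq(Immediate src, Register dst)
    {
        EmitRexPrefix(dst);                          // REX.W
        EmitU8(0xC1);                                // group-2 shift by imm8
        EmitModrm(5, dst);                           // /5 = SHR
        EmitI8(static_cast<int8_t>(src.Value()));
    }
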
1037 void AssemblerX64::Andq(Immediate src, Register dst)
1040 if (InRange8(src.Value())) {
1045 EmitI8(static_cast<int8_t>(src.Value()));
1049 EmitI32(src.Value());
1055 EmitI32(src.Value());
1059 void AssemblerX64::Andl(Immediate src, Register dst)
1062 if (InRange8(src.Value())) {
1067 EmitI8(static_cast<int8_t>(src.Value()));
1071 EmitI32(src.Value());
1077 EmitI32(src.Value());
1081 void AssemblerX64::And(Register src, Register dst)
1083 EmitRexPrefix(src, dst);
1086 EmitModrm(src, dst);
1089 void AssemblerX64::Or(Immediate src, Register dst)
1092 if (InRange8(src.Value())) {
1097 EmitI8(static_cast<int8_t>(src.Value()));
1101 EmitI32(src.Value());
1107 EmitI32(src.Value());
1111 void AssemblerX64::Orq(Register src, Register dst)
1113 EmitRexPrefix(src, dst);
1116 EmitModrm(src, dst);
1279 void AssemblerX64::Movzbq(const Operand &src, Register dst)
1281 EmitRexPrefix(dst, src);
1286 EmitOperand(dst, src);
1289 void AssemblerX64::Movzbl(const Operand &src, Register dst)
1291 EmitRexPrefixl(dst, src);
1296 EmitOperand(dst, src);
1299 void AssemblerX64::Movzbl(Register src, Register dst)
1301 EmitRexPrefixl(dst, src);
1306 EmitModrm(dst, src);
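
Movzbq, Movzbl and Movzwq (line 1374) are two-opcode-byte forms; the gap between the REX helper and the EmitOperand/EmitModrm lines is where the 0F B6 (zero-extend byte) or 0F B7 (zero-extend word) escape sequence would go. A sketch of the register-register Movzbl under that assumption, with EmitU8 assumed:

    void AssemblerX64::Movzbl(Register src, Register dst)
    {
        EmitRexPrefixl(dst, src);
        EmitU8(0x0F);                // two-byte opcode escape
        EmitU8(0xB6);                // MOVZX r32, r/m8
        EmitModrm(dst, src);         // dst in reg, src in r/m
    }
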
1309 void AssemblerX64::Btq(Immediate src, Register dst)
1317 EmitI8(static_cast<int8_t>(src.Value()));
1319 void AssemblerX64::Btl(Immediate src, Register dst)
1327 EmitI8(static_cast<int8_t>(src.Value()));
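
Btq and Btl likewise end in one EmitI8, which fits the group-8 bit-test encoding 0F BA /4 ib (REX.W only on Btq); Btsl at 1358 is the register form, 0F AB /r, with src landing in the reg field. A sketch of Btq, helpers assumed as above:

    void AssemblerX64::Btq(Immediate src, Register dst)
    {
        EmitRexPrefix(dst);                          // REX.W
        EmitU8(0x0F);                                // two-byte opcode escape
        EmitU8(0xBA);                                // group-8 bit test, imm8
        EmitModrm(4, dst);                           // /4 = BT
        EmitI8(static_cast<int8_t>(src.Value()));
    }
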
1330 void AssemblerX64::Movabs(uint64_t src, Register dst)
1335 EmitU64(src);
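
Movabs is the only emitter in this list that takes a full 64-bit payload (EmitU64), which matches MOV r64, imm64: the destination register is folded into the opcode byte (B8+rd) instead of a ModRM byte. A sketch under that assumption; the numeric cast of Register and EmitU8 are not from the matched lines:

    void AssemblerX64::Movabs(uint64_t src, Register dst)
    {
        EmitRexPrefix(dst);                                 // REX.W (+ REX.B for r8-r15)
        EmitU8(0xB8 | (static_cast<uint8_t>(dst) & 0x7));   // B8+rd, register in the opcode
        EmitU64(src);                                       // full 64-bit immediate
    }
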
1338 void AssemblerX64::Shll(Immediate src, Register dst)
1345 EmitI8(static_cast<int8_t>(src.Value()));
1348 void AssemblerX64::Shlq(Immediate src, Register dst)
1355 EmitI8(static_cast<int8_t>(src.Value()));
1358 void AssemblerX64::Btsl(Register src, Register dst)
1360 EmitRexPrefixl(src, dst);
1365 EmitModrm(src, dst);
1374 void AssemblerX64::Movzwq(const Operand &src, Register dst)
1376 EmitRexPrefix(dst, src);
1379 EmitOperand(dst, src);