1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #if V8_TARGET_ARCH_IA32
6
7 #include "src/base/bits.h"
8 #include "src/base/division-by-constant.h"
9 #include "src/base/utils/random-number-generator.h"
10 #include "src/bootstrapper.h"
11 #include "src/callable.h"
12 #include "src/code-factory.h"
13 #include "src/code-stubs.h"
14 #include "src/debug/debug.h"
15 #include "src/external-reference-table.h"
16 #include "src/frame-constants.h"
17 #include "src/frames-inl.h"
18 #include "src/instruction-stream.h"
19 #include "src/runtime/runtime.h"
20
21 #include "src/ia32/assembler-ia32-inl.h"
22 #include "src/ia32/macro-assembler-ia32.h"
23
24 namespace v8 {
25 namespace internal {
26
27 // -------------------------------------------------------------------------
28 // MacroAssembler implementation.
29
30 MacroAssembler::MacroAssembler(Isolate* isolate, void* buffer, int size,
31 CodeObjectRequired create_code_object)
32 : TurboAssembler(isolate, buffer, size, create_code_object) {
33 if (create_code_object == CodeObjectRequired::kYes) {
34 // Unlike TurboAssembler, which can be used off the main thread and may not
35 // allocate, macro assembler creates its own copy of the self-reference
36 // marker in order to disambiguate between self-references during nested
37 // code generation (e.g.: codegen of the current object triggers stub
38 // compilation through CodeStub::GetCode()).
39 code_object_ = Handle<HeapObject>::New(
40 *isolate->factory()->NewSelfReferenceMarker(), isolate);
41 }
42 }
43
44 TurboAssembler::TurboAssembler(Isolate* isolate, void* buffer, int buffer_size,
45 CodeObjectRequired create_code_object)
46 : Assembler(isolate, buffer, buffer_size), isolate_(isolate) {
47 if (create_code_object == CodeObjectRequired::kYes) {
48 code_object_ = Handle<HeapObject>::New(
49 isolate->heap()->self_reference_marker(), isolate);
50 }
51 }
52
53 void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
54 if (isolate()->heap()->RootCanBeTreatedAsConstant(index)) {
55 Handle<Object> object = isolate()->heap()->root_handle(index);
56 if (object->IsHeapObject()) {
57 mov(destination, Handle<HeapObject>::cast(object));
58 } else {
59 mov(destination, Immediate(Smi::cast(*object)));
60 }
61 return;
62 }
63 ExternalReference roots_array_start =
64 ExternalReference::roots_array_start(isolate());
65 mov(destination, Immediate(index));
66 mov(destination, Operand::StaticArray(destination,
67 times_pointer_size,
68 roots_array_start));
69 }
70
71
72 void MacroAssembler::CompareRoot(Register with,
73 Register scratch,
74 Heap::RootListIndex index) {
75 ExternalReference roots_array_start =
76 ExternalReference::roots_array_start(isolate());
77 mov(scratch, Immediate(index));
78 cmp(with, Operand::StaticArray(scratch,
79 times_pointer_size,
80 roots_array_start));
81 }
82
83
84 void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
85 DCHECK(isolate()->heap()->RootCanBeTreatedAsConstant(index));
86 Handle<Object> object = isolate()->heap()->root_handle(index);
87 if (object->IsHeapObject()) {
88 cmp(with, Handle<HeapObject>::cast(object));
89 } else {
90 cmp(with, Immediate(Smi::cast(*object)));
91 }
92 }
93
94 void MacroAssembler::CompareRoot(Operand with, Heap::RootListIndex index) {
95 DCHECK(isolate()->heap()->RootCanBeTreatedAsConstant(index));
96 Handle<Object> object = isolate()->heap()->root_handle(index);
97 if (object->IsHeapObject()) {
98 cmp(with, Handle<HeapObject>::cast(object));
99 } else {
100 cmp(with, Immediate(Smi::cast(*object)));
101 }
102 }
103
104 void MacroAssembler::PushRoot(Heap::RootListIndex index) {
105 DCHECK(isolate()->heap()->RootCanBeTreatedAsConstant(index));
106 Handle<Object> object = isolate()->heap()->root_handle(index);
107 if (object->IsHeapObject()) {
108 Push(Handle<HeapObject>::cast(object));
109 } else {
110 Push(Smi::cast(*object));
111 }
112 }
113
114 static constexpr Register saved_regs[] = {eax, ecx, edx};
115
116 static constexpr int kNumberOfSavedRegs = sizeof(saved_regs) / sizeof(Register);
117
118 int TurboAssembler::RequiredStackSizeForCallerSaved(SaveFPRegsMode fp_mode,
119 Register exclusion1,
120 Register exclusion2,
121 Register exclusion3) const {
122 int bytes = 0;
123 for (int i = 0; i < kNumberOfSavedRegs; i++) {
124 Register reg = saved_regs[i];
125 if (reg != exclusion1 && reg != exclusion2 && reg != exclusion3) {
126 bytes += kPointerSize;
127 }
128 }
129
130 if (fp_mode == kSaveFPRegs) {
131 // Count all XMM registers except XMM0.
132 bytes += kDoubleSize * (XMMRegister::kNumRegisters - 1);
133 }
134
135 return bytes;
136 }
137
138 int TurboAssembler::PushCallerSaved(SaveFPRegsMode fp_mode, Register exclusion1,
139 Register exclusion2, Register exclusion3) {
140 // We don't allow a GC during a store buffer overflow so there is no need to
141 // store the registers in any particular way, but we do have to store and
142 // restore them.
143 int bytes = 0;
144 for (int i = 0; i < kNumberOfSavedRegs; i++) {
145 Register reg = saved_regs[i];
146 if (reg != exclusion1 && reg != exclusion2 && reg != exclusion3) {
147 push(reg);
148 bytes += kPointerSize;
149 }
150 }
151
152 if (fp_mode == kSaveFPRegs) {
153 // Save all XMM registers except XMM0.
154 int delta = kDoubleSize * (XMMRegister::kNumRegisters - 1);
155 sub(esp, Immediate(delta));
156 for (int i = XMMRegister::kNumRegisters - 1; i > 0; i--) {
157 XMMRegister reg = XMMRegister::from_code(i);
158 movsd(Operand(esp, (i - 1) * kDoubleSize), reg);
159 }
160 bytes += delta;
161 }
162
163 return bytes;
164 }
165
166 int TurboAssembler::PopCallerSaved(SaveFPRegsMode fp_mode, Register exclusion1,
167 Register exclusion2, Register exclusion3) {
168 int bytes = 0;
169 if (fp_mode == kSaveFPRegs) {
170 // Restore all XMM registers except XMM0.
171 int delta = kDoubleSize * (XMMRegister::kNumRegisters - 1);
172 for (int i = XMMRegister::kNumRegisters - 1; i > 0; i--) {
173 XMMRegister reg = XMMRegister::from_code(i);
174 movsd(reg, Operand(esp, (i - 1) * kDoubleSize));
175 }
176 add(esp, Immediate(delta));
177 bytes += delta;
178 }
179
180 for (int i = kNumberOfSavedRegs - 1; i >= 0; i--) {
181 Register reg = saved_regs[i];
182 if (reg != exclusion1 && reg != exclusion2 && reg != exclusion3) {
183 pop(reg);
184 bytes += kPointerSize;
185 }
186 }
187
188 return bytes;
189 }
190
191 void MacroAssembler::DoubleToI(Register result_reg, XMMRegister input_reg,
192 XMMRegister scratch, Label* lost_precision,
193 Label* is_nan, Label::Distance dst) {
194 DCHECK(input_reg != scratch);
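// Truncate to int32, convert back, and compare: any difference means the
// double was not exactly representable as an int32; an unordered compare
// (parity flag set) means the input was NaN.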
195 cvttsd2si(result_reg, Operand(input_reg));
196 Cvtsi2sd(scratch, Operand(result_reg));
197 ucomisd(scratch, input_reg);
198 j(not_equal, lost_precision, dst);
199 j(parity_even, is_nan, dst);
200 }
201
202 void MacroAssembler::RecordWriteField(Register object, int offset,
203 Register value, Register dst,
204 SaveFPRegsMode save_fp,
205 RememberedSetAction remembered_set_action,
206 SmiCheck smi_check) {
207 // First, check if a write barrier is even needed. The tests below
208 // catch stores of Smis.
209 Label done;
210
211 // Skip barrier if writing a smi.
212 if (smi_check == INLINE_SMI_CHECK) {
213 JumpIfSmi(value, &done, Label::kNear);
214 }
215
216 // Although the object register is tagged, the offset is relative to the start
217 // of the object, so the offset must be a multiple of kPointerSize.
218 DCHECK(IsAligned(offset, kPointerSize));
219
220 lea(dst, FieldOperand(object, offset));
221 if (emit_debug_code()) {
222 Label ok;
223 test_b(dst, Immediate(kPointerSize - 1));
224 j(zero, &ok, Label::kNear);
225 int3();
226 bind(&ok);
227 }
228
229 RecordWrite(object, dst, value, save_fp, remembered_set_action,
230 OMIT_SMI_CHECK);
231
232 bind(&done);
233
234 // Clobber clobbered input registers when running with the debug-code flag
235 // turned on to provoke errors.
236 if (emit_debug_code()) {
237 mov(value, Immediate(bit_cast<int32_t>(kZapValue)));
238 mov(dst, Immediate(bit_cast<int32_t>(kZapValue)));
239 }
240 }
241
242 void TurboAssembler::SaveRegisters(RegList registers) {
243 DCHECK_GT(NumRegs(registers), 0);
244 for (int i = 0; i < Register::kNumRegisters; ++i) {
245 if ((registers >> i) & 1u) {
246 push(Register::from_code(i));
247 }
248 }
249 }
250
251 void TurboAssembler::RestoreRegisters(RegList registers) {
252 DCHECK_GT(NumRegs(registers), 0);
253 for (int i = Register::kNumRegisters - 1; i >= 0; --i) {
254 if ((registers >> i) & 1u) {
255 pop(Register::from_code(i));
256 }
257 }
258 }
259
260 void TurboAssembler::CallRecordWriteStub(
261 Register object, Register address,
262 RememberedSetAction remembered_set_action, SaveFPRegsMode fp_mode) {
263 // TODO(albertnetymk): For now we ignore remembered_set_action and fp_mode,
264 // i.e. always emit the remembered set and save FP registers in RecordWriteStub.
265 // If a large performance regression is observed, we should use these values to
266 // avoid unnecessary work.
267
268 Callable const callable =
269 Builtins::CallableFor(isolate(), Builtins::kRecordWrite);
270 RegList registers = callable.descriptor().allocatable_registers();
271
272 SaveRegisters(registers);
273
274 Register object_parameter(callable.descriptor().GetRegisterParameter(
275 RecordWriteDescriptor::kObject));
276 Register slot_parameter(
277 callable.descriptor().GetRegisterParameter(RecordWriteDescriptor::kSlot));
278 Register isolate_parameter(callable.descriptor().GetRegisterParameter(
279 RecordWriteDescriptor::kIsolate));
280 Register remembered_set_parameter(callable.descriptor().GetRegisterParameter(
281 RecordWriteDescriptor::kRememberedSet));
282 Register fp_mode_parameter(callable.descriptor().GetRegisterParameter(
283 RecordWriteDescriptor::kFPMode));
284
285 push(object);
286 push(address);
287
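// The descriptor's parameter registers may alias {object} or {address}, so
// route the values through the stack: {address} was pushed last and pops into
// the slot parameter, then {object} pops into the object parameter.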
288 pop(slot_parameter);
289 pop(object_parameter);
290
291 mov(isolate_parameter,
292 Immediate(ExternalReference::isolate_address(isolate())));
293 Move(remembered_set_parameter, Smi::FromEnum(remembered_set_action));
294 Move(fp_mode_parameter, Smi::FromEnum(fp_mode));
295 Call(callable.code(), RelocInfo::CODE_TARGET);
296
297 RestoreRegisters(registers);
298 }
299
300 void MacroAssembler::RecordWrite(Register object, Register address,
301 Register value, SaveFPRegsMode fp_mode,
302 RememberedSetAction remembered_set_action,
303 SmiCheck smi_check) {
304 DCHECK(object != value);
305 DCHECK(object != address);
306 DCHECK(value != address);
307 AssertNotSmi(object);
308
309 if (remembered_set_action == OMIT_REMEMBERED_SET &&
310 !FLAG_incremental_marking) {
311 return;
312 }
313
314 if (emit_debug_code()) {
315 Label ok;
316 cmp(value, Operand(address, 0));
317 j(equal, &ok, Label::kNear);
318 int3();
319 bind(&ok);
320 }
321
322 // First, check if a write barrier is even needed. The tests below
323 // catch stores of Smis and stores into young gen.
324 Label done;
325
326 if (smi_check == INLINE_SMI_CHECK) {
327 // Skip barrier if writing a smi.
328 JumpIfSmi(value, &done, Label::kNear);
329 }
330
331 CheckPageFlag(value,
332 value, // Used as scratch.
333 MemoryChunk::kPointersToHereAreInterestingMask, zero, &done,
334 Label::kNear);
335 CheckPageFlag(object,
336 value, // Used as scratch.
337 MemoryChunk::kPointersFromHereAreInterestingMask,
338 zero,
339 &done,
340 Label::kNear);
341
342 CallRecordWriteStub(object, address, remembered_set_action, fp_mode);
343
344 bind(&done);
345
346 // Count number of write barriers in generated code.
347 isolate()->counters()->write_barriers_static()->Increment();
348 IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1);
349
350 // Clobber clobbered registers when running with the debug-code flag
351 // turned on to provoke errors.
352 if (emit_debug_code()) {
353 mov(address, Immediate(bit_cast<int32_t>(kZapValue)));
354 mov(value, Immediate(bit_cast<int32_t>(kZapValue)));
355 }
356 }
357
358 void MacroAssembler::MaybeDropFrames() {
359 // Check whether we need to drop frames to restart a function on the stack.
360 ExternalReference restart_fp =
361 ExternalReference::debug_restart_fp_address(isolate());
362 mov(ebx, Operand::StaticVariable(restart_fp));
363 test(ebx, ebx);
364 j(not_zero, BUILTIN_CODE(isolate(), FrameDropperTrampoline),
365 RelocInfo::CODE_TARGET);
366 }
367
368 void TurboAssembler::Cvtsi2ss(XMMRegister dst, Operand src) {
369 xorps(dst, dst);
370 cvtsi2ss(dst, src);
371 }
372
373 void TurboAssembler::Cvtsi2sd(XMMRegister dst, Operand src) {
374 xorpd(dst, dst);
375 cvtsi2sd(dst, src);
376 }
377
378 void TurboAssembler::Cvtui2ss(XMMRegister dst, Operand src, Register tmp) {
379 Label done;
380 Register src_reg = src.is_reg_only() ? src.reg() : tmp;
381 if (src_reg == tmp) mov(tmp, src);
382 cvtsi2ss(dst, src_reg);
383 test(src_reg, src_reg);
384 j(positive, &done, Label::kNear);
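// A negative result means the uint32 input has its top bit set and was
// misread as a negative int32 by cvtsi2ss; repair it below by halving,
// converting, and doubling.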
385
386 // Compute {src/2 | (src&1)} (retain the LSB to avoid rounding errors).
387 if (src_reg != tmp) mov(tmp, src_reg);
388 shr(tmp, 1);
389 // The LSB is shifted into CF. If it is set, set the LSB in {tmp}.
390 Label msb_not_set;
391 j(not_carry, &msb_not_set, Label::kNear);
392 or_(tmp, Immediate(1));
393 bind(&msb_not_set);
394 cvtsi2ss(dst, tmp);
395 addss(dst, dst);
396 bind(&done);
397 }
398
399 void TurboAssembler::Cvttss2ui(Register dst, Operand src, XMMRegister tmp) {
400 Label done;
401 cvttss2si(dst, src);
402 test(dst, dst);
403 j(positive, &done);
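// A negative result means the input does not fit into a signed int32:
// subtract 2^31 (add INT32_MIN), truncate, then set the top bit again.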
404 Move(tmp, static_cast<float>(INT32_MIN));
405 addss(tmp, src);
406 cvttss2si(dst, tmp);
407 or_(dst, Immediate(0x80000000));
408 bind(&done);
409 }
410
411 void TurboAssembler::Cvtui2sd(XMMRegister dst, Operand src) {
412 Label done;
413 cmp(src, Immediate(0));
414 ExternalReference uint32_bias = ExternalReference::address_of_uint32_bias();
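// Convert as signed first; if the top bit of the input was set, the result is
// 2^32 too small, so add the uint32 bias to correct it.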
415 Cvtsi2sd(dst, src);
416 j(not_sign, &done, Label::kNear);
417 addsd(dst, Operand::StaticVariable(uint32_bias));
418 bind(&done);
419 }
420
421 void TurboAssembler::Cvttsd2ui(Register dst, Operand src, XMMRegister tmp) {
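// Bias the input down by 2^31 so it fits the signed truncation range, then
// add 0x80000000 to the result; the 32-bit wrap-around also produces the
// correct value for inputs below 2^31.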
422 Move(tmp, -2147483648.0);
423 addsd(tmp, src);
424 cvttsd2si(dst, tmp);
425 add(dst, Immediate(0x80000000));
426 }
427
428 void TurboAssembler::ShlPair(Register high, Register low, uint8_t shift) {
429 if (shift >= 32) {
430 mov(high, low);
431 shl(high, shift - 32);
432 xor_(low, low);
433 } else {
434 shld(high, low, shift);
435 shl(low, shift);
436 }
437 }
438
439 void TurboAssembler::ShlPair_cl(Register high, Register low) {
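// The hardware takes the shift count in cl modulo 32, so shifts of 32..63
// additionally need the low word moved into the high word; bit 5 of ecx
// distinguishes that case below.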
440 shld_cl(high, low);
441 shl_cl(low);
442 Label done;
443 test(ecx, Immediate(0x20));
444 j(equal, &done, Label::kNear);
445 mov(high, low);
446 xor_(low, low);
447 bind(&done);
448 }
449
450 void TurboAssembler::ShrPair(Register high, Register low, uint8_t shift) {
451 if (shift >= 32) {
452 mov(low, high);
453 shr(low, shift - 32);
454 xor_(high, high);
455 } else {
456 shrd(high, low, shift);
457 shr(high, shift);
458 }
459 }
460
461 void TurboAssembler::ShrPair_cl(Register high, Register low) {
462 shrd_cl(low, high);
463 shr_cl(high);
464 Label done;
465 test(ecx, Immediate(0x20));
466 j(equal, &done, Label::kNear);
467 mov(low, high);
468 xor_(high, high);
469 bind(&done);
470 }
471
472 void TurboAssembler::SarPair(Register high, Register low, uint8_t shift) {
473 if (shift >= 32) {
474 mov(low, high);
475 sar(low, shift - 32);
476 sar(high, 31);
477 } else {
478 shrd(high, low, shift);
479 sar(high, shift);
480 }
481 }
482
483 void TurboAssembler::SarPair_cl(Register high, Register low) {
484 shrd_cl(low, high);
485 sar_cl(high);
486 Label done;
487 test(ecx, Immediate(0x20));
488 j(equal, &done, Label::kNear);
489 mov(low, high);
490 sar(high, 31);
491 bind(&done);
492 }
493
494 void MacroAssembler::CmpObjectType(Register heap_object,
495 InstanceType type,
496 Register map) {
497 mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
498 CmpInstanceType(map, type);
499 }
500
501
502 void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
503 cmpw(FieldOperand(map, Map::kInstanceTypeOffset), Immediate(type));
504 }
505
506 void MacroAssembler::AssertSmi(Register object) {
507 if (emit_debug_code()) {
508 test(object, Immediate(kSmiTagMask));
509 Check(equal, AbortReason::kOperandIsNotASmi);
510 }
511 }
512
513 void MacroAssembler::AssertFixedArray(Register object) {
514 if (emit_debug_code()) {
515 test(object, Immediate(kSmiTagMask));
516 Check(not_equal, AbortReason::kOperandIsASmiAndNotAFixedArray);
517 Push(object);
518 CmpObjectType(object, FIXED_ARRAY_TYPE, object);
519 Pop(object);
520 Check(equal, AbortReason::kOperandIsNotAFixedArray);
521 }
522 }
523
524 void MacroAssembler::AssertConstructor(Register object) {
525 if (emit_debug_code()) {
526 test(object, Immediate(kSmiTagMask));
527 Check(not_equal, AbortReason::kOperandIsASmiAndNotAConstructor);
528 Push(object);
529 mov(object, FieldOperand(object, HeapObject::kMapOffset));
530 test_b(FieldOperand(object, Map::kBitFieldOffset),
531 Immediate(Map::IsConstructorBit::kMask));
532 Pop(object);
533 Check(not_zero, AbortReason::kOperandIsNotAConstructor);
534 }
535 }
536
537 void MacroAssembler::AssertFunction(Register object) {
538 if (emit_debug_code()) {
539 test(object, Immediate(kSmiTagMask));
540 Check(not_equal, AbortReason::kOperandIsASmiAndNotAFunction);
541 Push(object);
542 CmpObjectType(object, JS_FUNCTION_TYPE, object);
543 Pop(object);
544 Check(equal, AbortReason::kOperandIsNotAFunction);
545 }
546 }
547
548
549 void MacroAssembler::AssertBoundFunction(Register object) {
550 if (emit_debug_code()) {
551 test(object, Immediate(kSmiTagMask));
552 Check(not_equal, AbortReason::kOperandIsASmiAndNotABoundFunction);
553 Push(object);
554 CmpObjectType(object, JS_BOUND_FUNCTION_TYPE, object);
555 Pop(object);
556 Check(equal, AbortReason::kOperandIsNotABoundFunction);
557 }
558 }
559
560 void MacroAssembler::AssertGeneratorObject(Register object) {
561 if (!emit_debug_code()) return;
562
563 test(object, Immediate(kSmiTagMask));
564 Check(not_equal, AbortReason::kOperandIsASmiAndNotAGeneratorObject);
565
566 {
567 Push(object);
568 Register map = object;
569
570 // Load map
571 mov(map, FieldOperand(object, HeapObject::kMapOffset));
572
573 Label do_check;
574 // Check if JSGeneratorObject
575 CmpInstanceType(map, JS_GENERATOR_OBJECT_TYPE);
576 j(equal, &do_check, Label::kNear);
577
578 // Check if JSAsyncGeneratorObject
579 CmpInstanceType(map, JS_ASYNC_GENERATOR_OBJECT_TYPE);
580
581 bind(&do_check);
582 Pop(object);
583 }
584
585 Check(equal, AbortReason::kOperandIsNotAGeneratorObject);
586 }
587
588 void MacroAssembler::AssertUndefinedOrAllocationSite(Register object) {
589 if (emit_debug_code()) {
590 Label done_checking;
591 AssertNotSmi(object);
592 cmp(object, isolate()->factory()->undefined_value());
593 j(equal, &done_checking);
594 cmp(FieldOperand(object, 0),
595 Immediate(isolate()->factory()->allocation_site_map()));
596 Assert(equal, AbortReason::kExpectedUndefinedOrCell);
597 bind(&done_checking);
598 }
599 }
600
601
602 void MacroAssembler::AssertNotSmi(Register object) {
603 if (emit_debug_code()) {
604 test(object, Immediate(kSmiTagMask));
605 Check(not_equal, AbortReason::kOperandIsASmi);
606 }
607 }
608
609 void TurboAssembler::StubPrologue(StackFrame::Type type) {
610 push(ebp); // Caller's frame pointer.
611 mov(ebp, esp);
612 push(Immediate(StackFrame::TypeToMarker(type)));
613 }
614
615 void TurboAssembler::Prologue() {
616 push(ebp); // Caller's frame pointer.
617 mov(ebp, esp);
618 push(esi); // Callee's context.
619 push(edi); // Callee's JS function.
620 }
621
622 void TurboAssembler::EnterFrame(StackFrame::Type type) {
623 push(ebp);
624 mov(ebp, esp);
625 push(Immediate(StackFrame::TypeToMarker(type)));
626 if (type == StackFrame::INTERNAL) {
627 push(Immediate(CodeObject()));
628 // Check at runtime that this code object was patched correctly.
629 if (emit_debug_code()) {
630 cmp(Operand(esp, 0), Immediate(isolate()->factory()->undefined_value()));
631 Check(not_equal, AbortReason::kCodeObjectNotProperlyPatched);
632 }
633 }
634 }
635
636 void TurboAssembler::LeaveFrame(StackFrame::Type type) {
637 if (emit_debug_code()) {
638 cmp(Operand(ebp, CommonFrameConstants::kContextOrFrameTypeOffset),
639 Immediate(StackFrame::TypeToMarker(type)));
640 Check(equal, AbortReason::kStackFrameTypesMustMatch);
641 }
642 leave();
643 }
644
645 void MacroAssembler::EnterBuiltinFrame(Register context, Register target,
646 Register argc) {
647 Push(ebp);
648 Move(ebp, esp);
649 Push(context);
650 Push(target);
651 Push(argc);
652 }
653
654 void MacroAssembler::LeaveBuiltinFrame(Register context, Register target,
655 Register argc) {
656 Pop(argc);
657 Pop(target);
658 Pop(context);
659 leave();
660 }
661
662 void MacroAssembler::EnterExitFramePrologue(StackFrame::Type frame_type) {
663 DCHECK(frame_type == StackFrame::EXIT ||
664 frame_type == StackFrame::BUILTIN_EXIT);
665
666 // Set up the frame structure on the stack.
667 DCHECK_EQ(+2 * kPointerSize, ExitFrameConstants::kCallerSPDisplacement);
668 DCHECK_EQ(+1 * kPointerSize, ExitFrameConstants::kCallerPCOffset);
669 DCHECK_EQ(0 * kPointerSize, ExitFrameConstants::kCallerFPOffset);
670 push(ebp);
671 mov(ebp, esp);
672
673 // Reserve room for entry stack pointer and push the code object.
674 push(Immediate(StackFrame::TypeToMarker(frame_type)));
675 DCHECK_EQ(-2 * kPointerSize, ExitFrameConstants::kSPOffset);
676 push(Immediate(0)); // Saved entry sp, patched before call.
677 DCHECK_EQ(-3 * kPointerSize, ExitFrameConstants::kCodeOffset);
678 push(Immediate(CodeObject())); // Accessed from ExitFrame::code_slot.
679
680 // Save the frame pointer and the context in top.
681 ExternalReference c_entry_fp_address =
682 ExternalReference::Create(IsolateAddressId::kCEntryFPAddress, isolate());
683 ExternalReference context_address =
684 ExternalReference::Create(IsolateAddressId::kContextAddress, isolate());
685 ExternalReference c_function_address =
686 ExternalReference::Create(IsolateAddressId::kCFunctionAddress, isolate());
687 mov(Operand::StaticVariable(c_entry_fp_address), ebp);
688 mov(Operand::StaticVariable(context_address), esi);
689 mov(Operand::StaticVariable(c_function_address), ebx);
690 }
691
692
693 void MacroAssembler::EnterExitFrameEpilogue(int argc, bool save_doubles) {
694 // Optionally save all XMM registers.
695 if (save_doubles) {
696 int space = XMMRegister::kNumRegisters * kDoubleSize + argc * kPointerSize;
697 sub(esp, Immediate(space));
698 const int offset = -ExitFrameConstants::kFixedFrameSizeFromFp;
699 for (int i = 0; i < XMMRegister::kNumRegisters; i++) {
700 XMMRegister reg = XMMRegister::from_code(i);
701 movsd(Operand(ebp, offset - ((i + 1) * kDoubleSize)), reg);
702 }
703 } else {
704 sub(esp, Immediate(argc * kPointerSize));
705 }
706
707 // Get the required frame alignment for the OS.
708 const int kFrameAlignment = base::OS::ActivationFrameAlignment();
709 if (kFrameAlignment > 0) {
710 DCHECK(base::bits::IsPowerOfTwo(kFrameAlignment));
711 and_(esp, -kFrameAlignment);
712 }
713
714 // Patch the saved entry sp.
715 mov(Operand(ebp, ExitFrameConstants::kSPOffset), esp);
716 }
717
718 void MacroAssembler::EnterExitFrame(int argc, bool save_doubles,
719 StackFrame::Type frame_type) {
720 EnterExitFramePrologue(frame_type);
721
722 // Set up argc and argv in callee-saved registers.
723 int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
724 mov(edi, eax);
725 lea(esi, Operand(ebp, eax, times_4, offset));
726
727 // Reserve space for argc, argv and isolate.
728 EnterExitFrameEpilogue(argc, save_doubles);
729 }
730
731
732 void MacroAssembler::EnterApiExitFrame(int argc) {
733 EnterExitFramePrologue(StackFrame::EXIT);
734 EnterExitFrameEpilogue(argc, false);
735 }
736
737
738 void MacroAssembler::LeaveExitFrame(bool save_doubles, bool pop_arguments) {
739 // Optionally restore all XMM registers.
740 if (save_doubles) {
741 const int offset = -ExitFrameConstants::kFixedFrameSizeFromFp;
742 for (int i = 0; i < XMMRegister::kNumRegisters; i++) {
743 XMMRegister reg = XMMRegister::from_code(i);
744 movsd(reg, Operand(ebp, offset - ((i + 1) * kDoubleSize)));
745 }
746 }
747
748 if (pop_arguments) {
749 // Get the return address from the stack and restore the frame pointer.
750 mov(ecx, Operand(ebp, 1 * kPointerSize));
751 mov(ebp, Operand(ebp, 0 * kPointerSize));
752
753 // Pop the arguments and the receiver from the caller stack.
754 lea(esp, Operand(esi, 1 * kPointerSize));
755
756 // Push the return address to get ready to return.
757 push(ecx);
758 } else {
759 // Otherwise just leave the exit frame.
760 leave();
761 }
762
763 LeaveExitFrameEpilogue();
764 }
765
766 void MacroAssembler::LeaveExitFrameEpilogue() {
767 // Restore current context from top and clear it in debug mode.
768 ExternalReference context_address =
769 ExternalReference::Create(IsolateAddressId::kContextAddress, isolate());
770 mov(esi, Operand::StaticVariable(context_address));
771 #ifdef DEBUG
772 mov(Operand::StaticVariable(context_address),
773 Immediate(Context::kInvalidContext));
774 #endif
775
776 // Clear the top frame.
777 ExternalReference c_entry_fp_address =
778 ExternalReference::Create(IsolateAddressId::kCEntryFPAddress, isolate());
779 mov(Operand::StaticVariable(c_entry_fp_address), Immediate(0));
780 }
781
782 void MacroAssembler::LeaveApiExitFrame() {
783 mov(esp, ebp);
784 pop(ebp);
785
786 LeaveExitFrameEpilogue();
787 }
788
789
790 void MacroAssembler::PushStackHandler() {
791 // Adjust this code if not the case.
792 STATIC_ASSERT(StackHandlerConstants::kSize == 2 * kPointerSize);
793 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
794
795 push(Immediate(0)); // Padding.
796
797 // Link the current handler as the next handler.
798 ExternalReference handler_address =
799 ExternalReference::Create(IsolateAddressId::kHandlerAddress, isolate());
800 push(Operand::StaticVariable(handler_address));
801
802 // Set this new handler as the current one.
803 mov(Operand::StaticVariable(handler_address), esp);
804 }
805
806
807 void MacroAssembler::PopStackHandler() {
808 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
809 ExternalReference handler_address =
810 ExternalReference::Create(IsolateAddressId::kHandlerAddress, isolate());
811 pop(Operand::StaticVariable(handler_address));
812 add(esp, Immediate(StackHandlerConstants::kSize - kPointerSize));
813 }
814
815
816 void MacroAssembler::CallStub(CodeStub* stub) {
817 DCHECK(AllowThisStubCall(stub)); // Calls are not allowed in some stubs.
818 call(stub->GetCode(), RelocInfo::CODE_TARGET);
819 }
820
821 void TurboAssembler::CallStubDelayed(CodeStub* stub) {
822 DCHECK(AllowThisStubCall(stub)); // Calls are not allowed in some stubs.
823 call(stub);
824 }
825
826 void MacroAssembler::TailCallStub(CodeStub* stub) {
827 jmp(stub->GetCode(), RelocInfo::CODE_TARGET);
828 }
829
830 bool TurboAssembler::AllowThisStubCall(CodeStub* stub) {
831 return has_frame() || !stub->SometimesSetsUpAFrame();
832 }
833
834 void MacroAssembler::CallRuntime(const Runtime::Function* f,
835 int num_arguments,
836 SaveFPRegsMode save_doubles) {
837 // If the expected number of arguments of the runtime function is
838 // constant, we check that the actual number of arguments matches the
839 // expectation.
840 CHECK(f->nargs < 0 || f->nargs == num_arguments);
841
842 // TODO(1236192): Most runtime routines don't need the number of
843 // arguments passed in because it is constant. At some point we
844 // should remove this need and make the runtime routine entry code
845 // smarter.
846 Move(eax, Immediate(num_arguments));
847 mov(ebx, Immediate(ExternalReference::Create(f)));
848 Handle<Code> code =
849 CodeFactory::CEntry(isolate(), f->result_size, save_doubles);
850 Call(code, RelocInfo::CODE_TARGET);
851 }
852
853 void TurboAssembler::CallRuntimeDelayed(Zone* zone, Runtime::FunctionId fid,
854 SaveFPRegsMode save_doubles) {
855 const Runtime::Function* f = Runtime::FunctionForId(fid);
856 // TODO(1236192): Most runtime routines don't need the number of
857 // arguments passed in because it is constant. At some point we
858 // should remove this need and make the runtime routine entry code
859 // smarter.
860 Move(eax, Immediate(f->nargs));
861 mov(ebx, Immediate(ExternalReference::Create(f)));
862 Handle<Code> code =
863 CodeFactory::CEntry(isolate(), f->result_size, save_doubles);
864 Call(code, RelocInfo::CODE_TARGET);
865 }
866
867 void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid) {
868 // ----------- S t a t e -------------
869 // -- esp[0] : return address
870 // -- esp[4] : argument num_arguments - 1
871 // ...
872 // -- esp[4 * num_arguments] : argument 0 (receiver)
873 //
874 // For runtime functions with variable arguments:
875 // -- eax : number of arguments
876 // -----------------------------------
877
878 const Runtime::Function* function = Runtime::FunctionForId(fid);
879 DCHECK_EQ(1, function->result_size);
880 if (function->nargs >= 0) {
881 // TODO(1236192): Most runtime routines don't need the number of
882 // arguments passed in because it is constant. At some point we
883 // should remove this need and make the runtime routine entry code
884 // smarter.
885 mov(eax, Immediate(function->nargs));
886 }
887 JumpToExternalReference(ExternalReference::Create(fid));
888 }
889
890 void MacroAssembler::JumpToExternalReference(const ExternalReference& ext,
891 bool builtin_exit_frame) {
892 // Set the entry point and jump to the C entry runtime stub.
893 mov(ebx, Immediate(ext));
894 Handle<Code> code = CodeFactory::CEntry(isolate(), 1, kDontSaveFPRegs,
895 kArgvOnStack, builtin_exit_frame);
896 Jump(code, RelocInfo::CODE_TARGET);
897 }
898
899 void MacroAssembler::JumpToInstructionStream(Address entry) {
900 mov(kOffHeapTrampolineRegister, Immediate(entry, RelocInfo::OFF_HEAP_TARGET));
901 jmp(kOffHeapTrampolineRegister);
902 }
903
904 void TurboAssembler::PrepareForTailCall(
905 const ParameterCount& callee_args_count, Register caller_args_count_reg,
906 Register scratch0, Register scratch1,
907 int number_of_temp_values_after_return_address) {
908 #if DEBUG
909 if (callee_args_count.is_reg()) {
910 DCHECK(!AreAliased(callee_args_count.reg(), caller_args_count_reg, scratch0,
911 scratch1));
912 } else {
913 DCHECK(!AreAliased(caller_args_count_reg, scratch0, scratch1));
914 }
915 #endif
916
917 // Calculate the destination address where we will put the return address
918 // after we drop current frame.
919 Register new_sp_reg = scratch0;
920 if (callee_args_count.is_reg()) {
921 sub(caller_args_count_reg, callee_args_count.reg());
922 lea(new_sp_reg,
923 Operand(ebp, caller_args_count_reg, times_pointer_size,
924 StandardFrameConstants::kCallerPCOffset -
925 number_of_temp_values_after_return_address * kPointerSize));
926 } else {
927 lea(new_sp_reg, Operand(ebp, caller_args_count_reg, times_pointer_size,
928 StandardFrameConstants::kCallerPCOffset -
929 (callee_args_count.immediate() +
930 number_of_temp_values_after_return_address) *
931 kPointerSize));
932 }
933
934 if (FLAG_debug_code) {
935 cmp(esp, new_sp_reg);
936 Check(below, AbortReason::kStackAccessBelowStackPointer);
937 }
938
939 // Copy return address from caller's frame to current frame's return address
940 // so that it is not trashed and the following loop can copy it to the right
941 // place.
942 Register tmp_reg = scratch1;
943 mov(tmp_reg, Operand(ebp, StandardFrameConstants::kCallerPCOffset));
944 mov(Operand(esp, number_of_temp_values_after_return_address * kPointerSize),
945 tmp_reg);
946
947 // Restore caller's frame pointer now as it could be overwritten by
948 // the copying loop.
949 mov(ebp, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
950
951 // +2 here is to copy both receiver and return address.
952 Register count_reg = caller_args_count_reg;
953 if (callee_args_count.is_reg()) {
954 lea(count_reg, Operand(callee_args_count.reg(),
955 2 + number_of_temp_values_after_return_address));
956 } else {
957 mov(count_reg, Immediate(callee_args_count.immediate() + 2 +
958 number_of_temp_values_after_return_address));
959 // TODO(ishell): Unroll copying loop for small immediate values.
960 }
961
962 // Now copy callee arguments to the caller frame going backwards to avoid
963 // callee arguments corruption (source and destination areas could overlap).
964 Label loop, entry;
965 jmp(&entry, Label::kNear);
966 bind(&loop);
967 dec(count_reg);
968 mov(tmp_reg, Operand(esp, count_reg, times_pointer_size, 0));
969 mov(Operand(new_sp_reg, count_reg, times_pointer_size, 0), tmp_reg);
970 bind(&entry);
971 cmp(count_reg, Immediate(0));
972 j(not_equal, &loop, Label::kNear);
973
974 // Leave current frame.
975 mov(esp, new_sp_reg);
976 }
977
978 void MacroAssembler::InvokePrologue(const ParameterCount& expected,
979 const ParameterCount& actual, Label* done,
980 bool* definitely_mismatches,
981 InvokeFlag flag,
982 Label::Distance done_near) {
983 bool definitely_matches = false;
984 *definitely_mismatches = false;
985 Label invoke;
986 if (expected.is_immediate()) {
987 DCHECK(actual.is_immediate());
988 mov(eax, actual.immediate());
989 if (expected.immediate() == actual.immediate()) {
990 definitely_matches = true;
991 } else {
992 const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
993 if (expected.immediate() == sentinel) {
994 // Don't worry about adapting arguments for builtins that
995 // don't want that done. Skip adaptation code by making it look
996 // like we have a match between expected and actual number of
997 // arguments.
998 definitely_matches = true;
999 } else {
1000 *definitely_mismatches = true;
1001 mov(ebx, expected.immediate());
1002 }
1003 }
1004 } else {
1005 if (actual.is_immediate()) {
1006 // Expected is in register, actual is immediate. This is the
1007 // case when we invoke function values without going through the
1008 // IC mechanism.
1009 mov(eax, actual.immediate());
1010 cmp(expected.reg(), actual.immediate());
1011 j(equal, &invoke);
1012 DCHECK(expected.reg() == ebx);
1013 } else if (expected.reg() != actual.reg()) {
1014 // Both expected and actual are in (different) registers. This
1015 // is the case when we invoke functions using call and apply.
1016 cmp(expected.reg(), actual.reg());
1017 j(equal, &invoke);
1018 DCHECK(actual.reg() == eax);
1019 DCHECK(expected.reg() == ebx);
1020 } else {
1021 definitely_matches = true;
1022 Move(eax, actual.reg());
1023 }
1024 }
1025
1026 if (!definitely_matches) {
1027 Handle<Code> adaptor = BUILTIN_CODE(isolate(), ArgumentsAdaptorTrampoline);
1028 if (flag == CALL_FUNCTION) {
1029 call(adaptor, RelocInfo::CODE_TARGET);
1030 if (!*definitely_mismatches) {
1031 jmp(done, done_near);
1032 }
1033 } else {
1034 jmp(adaptor, RelocInfo::CODE_TARGET);
1035 }
1036 bind(&invoke);
1037 }
1038 }
1039
1040 void MacroAssembler::CheckDebugHook(Register fun, Register new_target,
1041 const ParameterCount& expected,
1042 const ParameterCount& actual) {
1043 Label skip_hook;
1044
1045 ExternalReference debug_hook_active =
1046 ExternalReference::debug_hook_on_function_call_address(isolate());
1047 cmpb(Operand::StaticVariable(debug_hook_active), Immediate(0));
1048 j(equal, &skip_hook);
1049
1050 {
1051 FrameScope frame(this,
1052 has_frame() ? StackFrame::NONE : StackFrame::INTERNAL);
1053 if (expected.is_reg()) {
1054 SmiTag(expected.reg());
1055 Push(expected.reg());
1056 }
1057 if (actual.is_reg()) {
1058 SmiTag(actual.reg());
1059 Push(actual.reg());
1060 SmiUntag(actual.reg());
1061 }
1062 if (new_target.is_valid()) {
1063 Push(new_target);
1064 }
1065 Push(fun);
1066 Push(fun);
1067 Operand receiver_op =
1068 actual.is_reg()
1069 ? Operand(ebp, actual.reg(), times_pointer_size, kPointerSize * 2)
1070 : Operand(ebp, actual.immediate() * times_pointer_size +
1071 kPointerSize * 2);
1072 Push(receiver_op);
1073 CallRuntime(Runtime::kDebugOnFunctionCall);
1074 Pop(fun);
1075 if (new_target.is_valid()) {
1076 Pop(new_target);
1077 }
1078 if (actual.is_reg()) {
1079 Pop(actual.reg());
1080 SmiUntag(actual.reg());
1081 }
1082 if (expected.is_reg()) {
1083 Pop(expected.reg());
1084 SmiUntag(expected.reg());
1085 }
1086 }
1087 bind(&skip_hook);
1088 }
1089
1090 void MacroAssembler::InvokeFunctionCode(Register function, Register new_target,
1091 const ParameterCount& expected,
1092 const ParameterCount& actual,
1093 InvokeFlag flag) {
1094 // You can't call a function without a valid frame.
1095 DCHECK(flag == JUMP_FUNCTION || has_frame());
1096 DCHECK(function == edi);
1097 DCHECK_IMPLIES(new_target.is_valid(), new_target == edx);
1098
1099 // On function call, call into the debugger if necessary.
1100 CheckDebugHook(function, new_target, expected, actual);
1101
1102 // Clear the new.target register if not given.
1103 if (!new_target.is_valid()) {
1104 mov(edx, isolate()->factory()->undefined_value());
1105 }
1106
1107 Label done;
1108 bool definitely_mismatches = false;
1109 InvokePrologue(expected, actual, &done, &definitely_mismatches, flag,
1110 Label::kNear);
1111 if (!definitely_mismatches) {
1112 // We call indirectly through the code field in the function to
1113 // allow recompilation to take effect without changing any of the
1114 // call sites.
1115 static_assert(kJavaScriptCallCodeStartRegister == ecx, "ABI mismatch");
1116 mov(ecx, FieldOperand(function, JSFunction::kCodeOffset));
1117 add(ecx, Immediate(Code::kHeaderSize - kHeapObjectTag));
1118 if (flag == CALL_FUNCTION) {
1119 call(ecx);
1120 } else {
1121 DCHECK(flag == JUMP_FUNCTION);
1122 jmp(ecx);
1123 }
1124 bind(&done);
1125 }
1126 }
1127
1128 void MacroAssembler::InvokeFunction(Register fun, Register new_target,
1129 const ParameterCount& actual,
1130 InvokeFlag flag) {
1131 // You can't call a function without a valid frame.
1132 DCHECK(flag == JUMP_FUNCTION || has_frame());
1133
1134 DCHECK(fun == edi);
1135 mov(ebx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
1136 mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
1137 mov(ebx, FieldOperand(ebx, SharedFunctionInfo::kFormalParameterCountOffset));
1138
1139 ParameterCount expected(ebx);
1140 InvokeFunctionCode(edi, new_target, expected, actual, flag);
1141 }
1142
1143 void MacroAssembler::InvokeFunction(Register fun,
1144 const ParameterCount& expected,
1145 const ParameterCount& actual,
1146 InvokeFlag flag) {
1147 // You can't call a function without a valid frame.
1148 DCHECK(flag == JUMP_FUNCTION || has_frame());
1149
1150 DCHECK(fun == edi);
1151 mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
1152
1153 InvokeFunctionCode(edi, no_reg, expected, actual, flag);
1154 }
1155
1156 void MacroAssembler::LoadGlobalProxy(Register dst) {
1157 mov(dst, NativeContextOperand());
1158 mov(dst, ContextOperand(dst, Context::GLOBAL_PROXY_INDEX));
1159 }
1160
1161 void MacroAssembler::LoadGlobalFunction(int index, Register function) {
1162 // Load the native context from the current context.
1163 mov(function, NativeContextOperand());
1164 // Load the function from the native context.
1165 mov(function, ContextOperand(function, index));
1166 }
1167
1168 int MacroAssembler::SafepointRegisterStackIndex(int reg_code) {
1169 // The registers are pushed starting with the lowest encoding,
1170 // which means that lowest encodings are furthest away from
1171 // the stack pointer.
1172 DCHECK(reg_code >= 0 && reg_code < kNumSafepointRegisters);
1173 return kNumSafepointRegisters - reg_code - 1;
1174 }
1175
1176 void TurboAssembler::Ret() { ret(0); }
1177
1178 void TurboAssembler::Ret(int bytes_dropped, Register scratch) {
1179 if (is_uint16(bytes_dropped)) {
1180 ret(bytes_dropped);
1181 } else {
1182 pop(scratch);
1183 add(esp, Immediate(bytes_dropped));
1184 push(scratch);
1185 ret(0);
1186 }
1187 }
1188
1189
1190 void MacroAssembler::Drop(int stack_elements) {
1191 if (stack_elements > 0) {
1192 add(esp, Immediate(stack_elements * kPointerSize));
1193 }
1194 }
1195
1196 void TurboAssembler::Move(Register dst, Register src) {
1197 if (dst != src) {
1198 mov(dst, src);
1199 }
1200 }
1201
1202 void TurboAssembler::Move(Register dst, const Immediate& x) {
1203 if (!x.is_heap_object_request() && x.is_zero()) {
1204 xor_(dst, dst); // Shorter than mov of 32-bit immediate 0.
1205 } else {
1206 mov(dst, x);
1207 }
1208 }
1209
1210 void TurboAssembler::Move(Operand dst, const Immediate& x) { mov(dst, x); }
1211
1212 void TurboAssembler::Move(Register dst, Handle<HeapObject> object) {
1213 mov(dst, object);
1214 }
1215
1216 void TurboAssembler::Move(XMMRegister dst, uint32_t src) {
1217 if (src == 0) {
1218 pxor(dst, dst);
1219 } else {
1220 unsigned cnt = base::bits::CountPopulation(src);
1221 unsigned nlz = base::bits::CountLeadingZeros32(src);
1222 unsigned ntz = base::bits::CountTrailingZeros32(src);
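// If the set bits of {src} form one contiguous run, build the constant from
// all-ones (pcmpeqd) using shifts instead of going through a GP register.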
1223 if (nlz + cnt + ntz == 32) {
1224 pcmpeqd(dst, dst);
1225 if (ntz == 0) {
1226 psrld(dst, 32 - cnt);
1227 } else {
1228 pslld(dst, 32 - cnt);
1229 if (nlz != 0) psrld(dst, nlz);
1230 }
1231 } else {
1232 push(eax);
1233 mov(eax, Immediate(src));
1234 movd(dst, Operand(eax));
1235 pop(eax);
1236 }
1237 }
1238 }
1239
1240 void TurboAssembler::Move(XMMRegister dst, uint64_t src) {
1241 if (src == 0) {
1242 pxor(dst, dst);
1243 } else {
1244 uint32_t lower = static_cast<uint32_t>(src);
1245 uint32_t upper = static_cast<uint32_t>(src >> 32);
1246 unsigned cnt = base::bits::CountPopulation(src);
1247 unsigned nlz = base::bits::CountLeadingZeros64(src);
1248 unsigned ntz = base::bits::CountTrailingZeros64(src);
1249 if (nlz + cnt + ntz == 64) {
1250 pcmpeqd(dst, dst);
1251 if (ntz == 0) {
1252 psrlq(dst, 64 - cnt);
1253 } else {
1254 psllq(dst, 64 - cnt);
1255 if (nlz != 0) psrlq(dst, nlz);
1256 }
1257 } else if (lower == 0) {
1258 Move(dst, upper);
1259 psllq(dst, 32);
1260 } else if (CpuFeatures::IsSupported(SSE4_1)) {
1261 CpuFeatureScope scope(this, SSE4_1);
1262 push(eax);
1263 Move(eax, Immediate(lower));
1264 movd(dst, Operand(eax));
1265 if (upper != lower) {
1266 Move(eax, Immediate(upper));
1267 }
1268 pinsrd(dst, Operand(eax), 1);
1269 pop(eax);
1270 } else {
1271 push(Immediate(upper));
1272 push(Immediate(lower));
1273 movsd(dst, Operand(esp, 0));
1274 add(esp, Immediate(kDoubleSize));
1275 }
1276 }
1277 }
1278
1279 void TurboAssembler::Pshuflw(XMMRegister dst, Operand src, uint8_t shuffle) {
1280 if (CpuFeatures::IsSupported(AVX)) {
1281 CpuFeatureScope scope(this, AVX);
1282 vpshuflw(dst, src, shuffle);
1283 } else {
1284 pshuflw(dst, src, shuffle);
1285 }
1286 }
1287
1288 void TurboAssembler::Pshufd(XMMRegister dst, Operand src, uint8_t shuffle) {
1289 if (CpuFeatures::IsSupported(AVX)) {
1290 CpuFeatureScope scope(this, AVX);
1291 vpshufd(dst, src, shuffle);
1292 } else {
1293 pshufd(dst, src, shuffle);
1294 }
1295 }
1296
1297 void TurboAssembler::Psignb(XMMRegister dst, Operand src) {
1298 if (CpuFeatures::IsSupported(AVX)) {
1299 CpuFeatureScope scope(this, AVX);
1300 vpsignb(dst, dst, src);
1301 return;
1302 }
1303 if (CpuFeatures::IsSupported(SSSE3)) {
1304 CpuFeatureScope sse_scope(this, SSSE3);
1305 psignb(dst, src);
1306 return;
1307 }
1308 UNREACHABLE();
1309 }
1310
1311 void TurboAssembler::Psignw(XMMRegister dst, Operand src) {
1312 if (CpuFeatures::IsSupported(AVX)) {
1313 CpuFeatureScope scope(this, AVX);
1314 vpsignw(dst, dst, src);
1315 return;
1316 }
1317 if (CpuFeatures::IsSupported(SSSE3)) {
1318 CpuFeatureScope sse_scope(this, SSSE3);
1319 psignw(dst, src);
1320 return;
1321 }
1322 UNREACHABLE();
1323 }
1324
1325 void TurboAssembler::Psignd(XMMRegister dst, Operand src) {
1326 if (CpuFeatures::IsSupported(AVX)) {
1327 CpuFeatureScope scope(this, AVX);
1328 vpsignd(dst, dst, src);
1329 return;
1330 }
1331 if (CpuFeatures::IsSupported(SSSE3)) {
1332 CpuFeatureScope sse_scope(this, SSSE3);
1333 psignd(dst, src);
1334 return;
1335 }
1336 UNREACHABLE();
1337 }
1338
1339 void TurboAssembler::Ptest(XMMRegister dst, Operand src) {
1340 if (CpuFeatures::IsSupported(AVX)) {
1341 CpuFeatureScope scope(this, AVX);
1342 vptest(dst, src);
1343 return;
1344 }
1345 if (CpuFeatures::IsSupported(SSE4_1)) {
1346 CpuFeatureScope sse_scope(this, SSE4_1);
1347 ptest(dst, src);
1348 return;
1349 }
1350 UNREACHABLE();
1351 }
1352
1353 void TurboAssembler::Pshufb(XMMRegister dst, Operand src) {
1354 if (CpuFeatures::IsSupported(AVX)) {
1355 CpuFeatureScope scope(this, AVX);
1356 vpshufb(dst, dst, src);
1357 return;
1358 }
1359 if (CpuFeatures::IsSupported(SSSE3)) {
1360 CpuFeatureScope sse_scope(this, SSSE3);
1361 pshufb(dst, src);
1362 return;
1363 }
1364 UNREACHABLE();
1365 }
1366
1367 void TurboAssembler::Pextrb(Register dst, XMMRegister src, int8_t imm8) {
1368 if (CpuFeatures::IsSupported(AVX)) {
1369 CpuFeatureScope scope(this, AVX);
1370 vpextrb(dst, src, imm8);
1371 return;
1372 }
1373 if (CpuFeatures::IsSupported(SSE4_1)) {
1374 CpuFeatureScope sse_scope(this, SSE4_1);
1375 pextrb(dst, src, imm8);
1376 return;
1377 }
1378 UNREACHABLE();
1379 }
1380
1381 void TurboAssembler::Pextrw(Register dst, XMMRegister src, int8_t imm8) {
1382 if (CpuFeatures::IsSupported(AVX)) {
1383 CpuFeatureScope scope(this, AVX);
1384 vpextrw(dst, src, imm8);
1385 return;
1386 }
1387 if (CpuFeatures::IsSupported(SSE4_1)) {
1388 CpuFeatureScope sse_scope(this, SSE4_1);
1389 pextrw(dst, src, imm8);
1390 return;
1391 }
1392 UNREACHABLE();
1393 }
1394
1395 void TurboAssembler::Pextrd(Register dst, XMMRegister src, int8_t imm8) {
1396 if (imm8 == 0) {
1397 Movd(dst, src);
1398 return;
1399 }
1400 if (CpuFeatures::IsSupported(AVX)) {
1401 CpuFeatureScope scope(this, AVX);
1402 vpextrd(dst, src, imm8);
1403 return;
1404 }
1405 if (CpuFeatures::IsSupported(SSE4_1)) {
1406 CpuFeatureScope sse_scope(this, SSE4_1);
1407 pextrd(dst, src, imm8);
1408 return;
1409 }
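// SSE2 fallback: shuffle the requested lane into lane 0, clobbering xmm0 as a
// scratch register, then move it to the destination.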
1410 DCHECK_LT(imm8, 4);
1411 pshufd(xmm0, src, imm8);
1412 movd(dst, xmm0);
1413 }
1414
1415 void TurboAssembler::Pinsrd(XMMRegister dst, Operand src, int8_t imm8,
1416 bool is_64_bits) {
1417 if (CpuFeatures::IsSupported(SSE4_1)) {
1418 CpuFeatureScope sse_scope(this, SSE4_1);
1419 pinsrd(dst, src, imm8);
1420 return;
1421 }
1422 if (is_64_bits) {
1423 movd(xmm0, src);
1424 if (imm8 == 1) {
1425 punpckldq(dst, xmm0);
1426 } else {
1427 DCHECK_EQ(0, imm8);
1428 psrlq(dst, 32);
1429 punpckldq(xmm0, dst);
1430 movaps(dst, xmm0);
1431 }
1432 } else {
1433 DCHECK_LT(imm8, 4);
1434 push(eax);
1435 mov(eax, src);
1436 pinsrw(dst, eax, imm8 * 2);
1437 shr(eax, 16);
1438 pinsrw(dst, eax, imm8 * 2 + 1);
1439 pop(eax);
1440 }
1441 }
1442
1443 void TurboAssembler::Lzcnt(Register dst, Operand src) {
1444 if (CpuFeatures::IsSupported(LZCNT)) {
1445 CpuFeatureScope scope(this, LZCNT);
1446 lzcnt(dst, src);
1447 return;
1448 }
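// Fall back to bsr, which returns the index of the highest set bit but leaves
// the destination undefined for a zero input, so load 63 in that case; the
// final xor with 31 then yields 32.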
1449 Label not_zero_src;
1450 bsr(dst, src);
1451 j(not_zero, &not_zero_src, Label::kNear);
1452 Move(dst, Immediate(63)); // 63^31 == 32
1453 bind(&not_zero_src);
1454 xor_(dst, Immediate(31)); // for x in [0..31], 31^x == 31-x.
1455 }
1456
1457 void TurboAssembler::Tzcnt(Register dst, Operand src) {
1458 if (CpuFeatures::IsSupported(BMI1)) {
1459 CpuFeatureScope scope(this, BMI1);
1460 tzcnt(dst, src);
1461 return;
1462 }
1463 Label not_zero_src;
1464 bsf(dst, src);
1465 j(not_zero, &not_zero_src, Label::kNear);
1466 Move(dst, Immediate(32)); // The result of tzcnt is 32 if src = 0.
1467 bind(&not_zero_src);
1468 }
1469
1470 void TurboAssembler::Popcnt(Register dst, Operand src) {
1471 if (CpuFeatures::IsSupported(POPCNT)) {
1472 CpuFeatureScope scope(this, POPCNT);
1473 popcnt(dst, src);
1474 return;
1475 }
1476 UNREACHABLE();
1477 }
1478
1479 void MacroAssembler::LoadWeakValue(Register in_out, Label* target_if_cleared) {
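// {in_out} holds a weak reference: jump to {target_if_cleared} if it is the
// cleared-reference sentinel, otherwise strip the weak tag bits to recover
// the strong pointer.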
1480 cmp(in_out, Immediate(kClearedWeakHeapObject));
1481 j(equal, target_if_cleared);
1482
1483 and_(in_out, Immediate(~kWeakHeapObjectMask));
1484 }
1485
1486 void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
1487 DCHECK_GT(value, 0);
1488 if (FLAG_native_code_counters && counter->Enabled()) {
1489 Operand operand =
1490 Operand::StaticVariable(ExternalReference::Create(counter));
1491 if (value == 1) {
1492 inc(operand);
1493 } else {
1494 add(operand, Immediate(value));
1495 }
1496 }
1497 }
1498
1499
1500 void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
1501 DCHECK_GT(value, 0);
1502 if (FLAG_native_code_counters && counter->Enabled()) {
1503 Operand operand =
1504 Operand::StaticVariable(ExternalReference::Create(counter));
1505 if (value == 1) {
1506 dec(operand);
1507 } else {
1508 sub(operand, Immediate(value));
1509 }
1510 }
1511 }
1512
1513 void TurboAssembler::Assert(Condition cc, AbortReason reason) {
1514 if (emit_debug_code()) Check(cc, reason);
1515 }
1516
1517 void TurboAssembler::AssertUnreachable(AbortReason reason) {
1518 if (emit_debug_code()) Abort(reason);
1519 }
1520
1521 void TurboAssembler::Check(Condition cc, AbortReason reason) {
1522 Label L;
1523 j(cc, &L);
1524 Abort(reason);
1525 // will not return here
1526 bind(&L);
1527 }
1528
1529 void TurboAssembler::CheckStackAlignment() {
1530 int frame_alignment = base::OS::ActivationFrameAlignment();
1531 int frame_alignment_mask = frame_alignment - 1;
1532 if (frame_alignment > kPointerSize) {
1533 DCHECK(base::bits::IsPowerOfTwo(frame_alignment));
1534 Label alignment_as_expected;
1535 test(esp, Immediate(frame_alignment_mask));
1536 j(zero, &alignment_as_expected);
1537 // Abort if stack is not aligned.
1538 int3();
1539 bind(&alignment_as_expected);
1540 }
1541 }
1542
1543 void TurboAssembler::Abort(AbortReason reason) {
1544 #ifdef DEBUG
1545 const char* msg = GetAbortReason(reason);
1546 if (msg != nullptr) {
1547 RecordComment("Abort message: ");
1548 RecordComment(msg);
1549 }
1550
1551 if (FLAG_trap_on_abort) {
1552 int3();
1553 return;
1554 }
1555 #endif
1556
1557 Move(edx, Smi::FromInt(static_cast<int>(reason)));
1558
1559 // Disable stub call restrictions to always allow calls to abort.
1560 if (!has_frame()) {
1561 // We don't actually want to generate a pile of code for this, so just
1562 // claim there is a stack frame, without generating one.
1563 FrameScope scope(this, StackFrame::NONE);
1564 Call(BUILTIN_CODE(isolate(), Abort), RelocInfo::CODE_TARGET);
1565 } else {
1566 Call(BUILTIN_CODE(isolate(), Abort), RelocInfo::CODE_TARGET);
1567 }
1568 // will not return here
1569 int3();
1570 }
1571
1572
1573 void TurboAssembler::PrepareCallCFunction(int num_arguments, Register scratch) {
1574 int frame_alignment = base::OS::ActivationFrameAlignment();
1575 if (frame_alignment != 0) {
1576 // Make stack end at alignment and make room for num_arguments words
1577 // and the original value of esp.
1578 mov(scratch, esp);
1579 sub(esp, Immediate((num_arguments + 1) * kPointerSize));
1580 DCHECK(base::bits::IsPowerOfTwo(frame_alignment));
1581 and_(esp, -frame_alignment);
1582 mov(Operand(esp, num_arguments * kPointerSize), scratch);
1583 } else {
1584 sub(esp, Immediate(num_arguments * kPointerSize));
1585 }
1586 }
1587
1588 void TurboAssembler::CallCFunction(ExternalReference function,
1589 int num_arguments) {
1590 // Trashing eax is ok as it will be the return value.
1591 mov(eax, Immediate(function));
1592 CallCFunction(eax, num_arguments);
1593 }
1594
1595 void TurboAssembler::CallCFunction(Register function, int num_arguments) {
1596 DCHECK_LE(num_arguments, kMaxCParameters);
1597 DCHECK(has_frame());
1598 // Check stack alignment.
1599 if (emit_debug_code()) {
1600 CheckStackAlignment();
1601 }
1602
1603 call(function);
1604 if (base::OS::ActivationFrameAlignment() != 0) {
1605 mov(esp, Operand(esp, num_arguments * kPointerSize));
1606 } else {
1607 add(esp, Immediate(num_arguments * kPointerSize));
1608 }
1609 }
1610
1611 void TurboAssembler::RetpolineCall(Register reg) {
1612 Label setup_return, setup_target, inner_indirect_branch, capture_spec;
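// Retpoline: the indirect target is reached through a ret whose return
// address has been overwritten with {reg}; speculative execution following
// the inner call is trapped in the pause/jmp loop.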
1613
1614 jmp(&setup_return); // Jump past the entire retpoline below.
1615
1616 bind(&inner_indirect_branch);
1617 call(&setup_target);
1618
1619 bind(&capture_spec);
1620 pause();
1621 jmp(&capture_spec);
1622
1623 bind(&setup_target);
1624 mov(Operand(esp, 0), reg);
1625 ret(0);
1626
1627 bind(&setup_return);
1628 call(&inner_indirect_branch); // Callee will return after this instruction.
1629 }
1630
1631 void TurboAssembler::RetpolineCall(Address destination, RelocInfo::Mode rmode) {
1632 Label setup_return, setup_target, inner_indirect_branch, capture_spec;
1633
1634 jmp(&setup_return); // Jump past the entire retpoline below.
1635
1636 bind(&inner_indirect_branch);
1637 call(&setup_target);
1638
1639 bind(&capture_spec);
1640 pause();
1641 jmp(&capture_spec);
1642
1643 bind(&setup_target);
1644 mov(Operand(esp, 0), destination, rmode);
1645 ret(0);
1646
1647 bind(&setup_return);
1648 call(&inner_indirect_branch); // Callee will return after this instruction.
1649 }
1650
1651 void TurboAssembler::RetpolineJump(Register reg) {
1652 Label setup_target, capture_spec;
1653
1654 call(&setup_target);
1655
1656 bind(&capture_spec);
1657 pause();
1658 jmp(&capture_spec);
1659
1660 bind(&setup_target);
1661 mov(Operand(esp, 0), reg);
1662 ret(0);
1663 }
1664
1665 #ifdef DEBUG
1666 bool AreAliased(Register reg1,
1667 Register reg2,
1668 Register reg3,
1669 Register reg4,
1670 Register reg5,
1671 Register reg6,
1672 Register reg7,
1673 Register reg8) {
1674 int n_of_valid_regs = reg1.is_valid() + reg2.is_valid() +
1675 reg3.is_valid() + reg4.is_valid() + reg5.is_valid() + reg6.is_valid() +
1676 reg7.is_valid() + reg8.is_valid();
1677
1678 RegList regs = 0;
1679 if (reg1.is_valid()) regs |= reg1.bit();
1680 if (reg2.is_valid()) regs |= reg2.bit();
1681 if (reg3.is_valid()) regs |= reg3.bit();
1682 if (reg4.is_valid()) regs |= reg4.bit();
1683 if (reg5.is_valid()) regs |= reg5.bit();
1684 if (reg6.is_valid()) regs |= reg6.bit();
1685 if (reg7.is_valid()) regs |= reg7.bit();
1686 if (reg8.is_valid()) regs |= reg8.bit();
1687 int n_of_non_aliasing_regs = NumRegs(regs);
1688
1689 return n_of_valid_regs != n_of_non_aliasing_regs;
1690 }
1691 #endif
1692
1693
1694 void TurboAssembler::CheckPageFlag(Register object, Register scratch, int mask,
1695 Condition cc, Label* condition_met,
1696 Label::Distance condition_met_distance) {
1697 DCHECK(cc == zero || cc == not_zero);
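// Mask the address down to its page start so the MemoryChunk header (and its
// flags word) can be read.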
1698 if (scratch == object) {
1699 and_(scratch, Immediate(~Page::kPageAlignmentMask));
1700 } else {
1701 mov(scratch, Immediate(~Page::kPageAlignmentMask));
1702 and_(scratch, object);
1703 }
1704 if (mask < (1 << kBitsPerByte)) {
1705 test_b(Operand(scratch, MemoryChunk::kFlagsOffset), Immediate(mask));
1706 } else {
1707 test(Operand(scratch, MemoryChunk::kFlagsOffset), Immediate(mask));
1708 }
1709 j(cc, condition_met, condition_met_distance);
1710 }
1711
1712 void TurboAssembler::ComputeCodeStartAddress(Register dst) {
1713 // In order to get the address of the current instruction, we first need
1714 // to use a call and then use a pop, thus pushing the return address to
1715 // the stack and then popping it into the register.
1716 Label current;
1717 call(&current);
1718 int pc = pc_offset();
1719 bind(&current);
1720 pop(dst);
1721 if (pc != 0) {
1722 sub(dst, Immediate(pc));
1723 }
1724 }
1725
1726 void TurboAssembler::ResetSpeculationPoisonRegister() {
1727 mov(kSpeculationPoisonRegister, Immediate(-1));
1728 }
1729
1730 } // namespace internal
1731 } // namespace v8
1732
1733 #endif // V8_TARGET_ARCH_IA32
1734