1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #ifndef INCLUDED_FROM_MACRO_ASSEMBLER_H
6 #error This header must be included via macro-assembler.h
7 #endif
8 
9 #ifndef V8_CODEGEN_X64_MACRO_ASSEMBLER_X64_H_
10 #define V8_CODEGEN_X64_MACRO_ASSEMBLER_X64_H_
11 
12 #include "src/base/flags.h"
13 #include "src/codegen/bailout-reason.h"
14 #include "src/codegen/shared-ia32-x64/macro-assembler-shared-ia32-x64.h"
15 #include "src/codegen/x64/assembler-x64.h"
16 #include "src/common/globals.h"
17 #include "src/execution/isolate-data.h"
18 #include "src/objects/contexts.h"
19 #include "src/objects/tagged-index.h"
20 
21 namespace v8 {
22 namespace internal {
23 
24 // Convenience for platform-independent signatures.
25 using MemOperand = Operand;
26 
27 class StringConstantBase;
28 
29 struct SmiIndex {
SmiIndexSmiIndex30   SmiIndex(Register index_register, ScaleFactor scale)
31       : reg(index_register), scale(scale) {}
32   Register reg;
33   ScaleFactor scale;
34 };
35 
// TODO(victorgomes): Move definition to macro-assembler.h, once all other
// platforms are updated.
// Selects which stack limit a stack check compares against: the interrupt
// limit (which can be lowered to request an interrupt) or the real limit.
enum class StackLimitKind { kInterruptStackLimit, kRealStackLimit };
39 
40 // Convenient class to access arguments below the stack pointer.
// Convenient class to access arguments below the stack pointer.
// Wraps a register holding the argument count and hands out Operands
// addressing individual stack arguments.
class StackArgumentsAccessor {
 public:
  // argc = the number of arguments not including the receiver.
  explicit StackArgumentsAccessor(Register argc) : argc_(argc) {
    DCHECK_NE(argc_, no_reg);
  }

  // Argument 0 is the receiver (despite argc not including the receiver).
  Operand operator[](int index) const { return GetArgumentOperand(index); }

  // Returns an Operand addressing the stack slot of argument |index|
  // (defined out of line).
  Operand GetArgumentOperand(int index) const;
  // The receiver occupies the slot of (implicit) argument 0.
  Operand GetReceiverOperand() const { return GetArgumentOperand(0); }

 private:
  // NOTE(review): the returned operands presumably address relative to the
  // runtime value of argc_ — keep this register unchanged while operands
  // obtained from this accessor are in use; confirm against the .cc file.
  const Register argc_;

  DISALLOW_IMPLICIT_CONSTRUCTORS(StackArgumentsAccessor);
};
59 
// x64 lower-level macro-assembler helpers, layered on top of the
// ia32/x64-shared base. Contains stack/call/smi/conversion/move helpers;
// heap- and isolate-dependent macros live in MacroAssembler below.
class V8_EXPORT_PRIVATE TurboAssembler
    : public SharedTurboAssemblerBase<TurboAssembler> {
 public:
  using SharedTurboAssemblerBase<TurboAssembler>::SharedTurboAssemblerBase;

  // Push/pop the return address that sits on top of the stack after a call.
  void PushReturnAddressFrom(Register src) { pushq(src); }
  void PopReturnAddressTo(Register dst) { popq(dst); }

  void Ret();

  // Return and drop arguments from stack, where the number of arguments
  // may be bigger than 2^16 - 1.  Requires a scratch register.
  void Ret(int bytes_dropped, Register scratch);

  // Operations on roots in the root-array.
  Operand RootAsOperand(RootIndex index);
  void LoadRoot(Register destination, RootIndex index) final;
  // Memory-destination variant; clobbers kScratchRegister, which is used
  // as an intermediate for the store.
  void LoadRoot(Operand destination, RootIndex index) {
    LoadRoot(kScratchRegister, index);
    movq(destination, kScratchRegister);
  }

  void Push(Register src);
  void Push(Operand src);
  void Push(Immediate value);
  void Push(Smi smi);
  // Pushes the raw TaggedIndex bits, truncated to 32 bits, as an immediate.
  void Push(TaggedIndex index) {
    Push(Immediate(static_cast<uint32_t>(index.ptr())));
  }
  void Push(Handle<HeapObject> source);

  enum class PushArrayOrder { kNormal, kReverse };
  // `array` points to the first element (the lowest address).
  // `array` and `size` are not modified.
  void PushArray(Register array, Register size, Register scratch,
                 PushArrayOrder order = PushArrayOrder::kNormal);

  // Before calling a C-function from generated code, align arguments on stack.
  // After aligning the frame, arguments must be stored in rsp[0], rsp[8],
  // etc., not pushed. The argument count assumes all arguments are word sized.
  // The number of slots reserved for arguments depends on platform. On Windows
  // stack slots are reserved for the arguments passed in registers. On other
  // platforms stack slots are only reserved for the arguments actually passed
  // on the stack.
  void PrepareCallCFunction(int num_arguments);

  // Calls a C function and cleans up the space for arguments allocated
  // by PrepareCallCFunction. The called function is not allowed to trigger a
  // garbage collection, since that might move the code and invalidate the
  // return address (unless this is somehow accounted for by the called
  // function).
  void CallCFunction(ExternalReference function, int num_arguments);
  void CallCFunction(Register function, int num_arguments);

  // Calculate the number of stack slots to reserve for arguments when calling a
  // C function.
  int ArgumentStackSlotsForCFunctionCall(int num_arguments);

  // Tests the page-header flag bits of the page containing |object| and
  // jumps to |condition_met| if |cc| holds for (flags & mask).
  void CheckPageFlag(Register object, Register scratch, int mask, Condition cc,
                     Label* condition_met,
                     Label::Distance condition_met_distance = Label::kFar);

  // Define movq here instead of using AVX_OP. movq is defined using templates
  // and there is a function template `void movq(P1)`, while technically
  // impossible, will be selected when deducing the arguments for AvxHelper.
  void Movq(XMMRegister dst, Register src);
  void Movq(Register dst, XMMRegister src);

  // Scalar float<->double and float<->integer conversions. The *si/*siq
  // suffixes denote 32-/64-bit integer destinations; the *ui variants
  // handle unsigned values (with an optional failure label where a direct
  // hardware conversion cannot express the result).
  void Cvtss2sd(XMMRegister dst, XMMRegister src);
  void Cvtss2sd(XMMRegister dst, Operand src);
  void Cvtsd2ss(XMMRegister dst, XMMRegister src);
  void Cvtsd2ss(XMMRegister dst, Operand src);
  void Cvttsd2si(Register dst, XMMRegister src);
  void Cvttsd2si(Register dst, Operand src);
  void Cvttsd2siq(Register dst, XMMRegister src);
  void Cvttsd2siq(Register dst, Operand src);
  void Cvttss2si(Register dst, XMMRegister src);
  void Cvttss2si(Register dst, Operand src);
  void Cvttss2siq(Register dst, XMMRegister src);
  void Cvttss2siq(Register dst, Operand src);
  void Cvtlui2ss(XMMRegister dst, Register src);
  void Cvtlui2ss(XMMRegister dst, Operand src);
  void Cvtlui2sd(XMMRegister dst, Register src);
  void Cvtlui2sd(XMMRegister dst, Operand src);
  void Cvtqui2ss(XMMRegister dst, Register src);
  void Cvtqui2ss(XMMRegister dst, Operand src);
  void Cvtqui2sd(XMMRegister dst, Register src);
  void Cvtqui2sd(XMMRegister dst, Operand src);
  void Cvttsd2uiq(Register dst, Operand src, Label* fail = nullptr);
  void Cvttsd2uiq(Register dst, XMMRegister src, Label* fail = nullptr);
  void Cvttss2uiq(Register dst, Operand src, Label* fail = nullptr);
  void Cvttss2uiq(Register dst, XMMRegister src, Label* fail = nullptr);

  // cvtsi2sd and cvtsi2ss instructions only write to the low 64/32-bit of dst
  // register, which hinders register renaming and makes dependence chains
  // longer. So we use xorpd to clear the dst register before cvtsi2sd for
  // non-AVX and a scratch XMM register as first src for AVX to solve this
  // issue.
  void Cvtqsi2ss(XMMRegister dst, Register src);
  void Cvtqsi2ss(XMMRegister dst, Operand src);
  void Cvtqsi2sd(XMMRegister dst, Register src);
  void Cvtqsi2sd(XMMRegister dst, Operand src);
  void Cvtlsi2ss(XMMRegister dst, Register src);
  void Cvtlsi2ss(XMMRegister dst, Operand src);
  void Cvtlsi2sd(XMMRegister dst, Register src);
  void Cvtlsi2sd(XMMRegister dst, Operand src);

  // Lane extract/insert fallbacks for CPUs without SSE4.1 (PEXTRD/PINSRD
  // are SSE4.1 instructions); the Pre-SSE4.1 variants emulate them.
  void PextrdPreSse41(Register dst, XMMRegister src, uint8_t imm8);
  void Pextrq(Register dst, XMMRegister src, int8_t imm8);

  void PinsrdPreSse41(XMMRegister dst, Register src2, uint8_t imm8,
                      uint32_t* load_pc_offset = nullptr);
  void PinsrdPreSse41(XMMRegister dst, Operand src2, uint8_t imm8,
                      uint32_t* load_pc_offset = nullptr);

  void Pinsrq(XMMRegister dst, XMMRegister src1, Register src2, uint8_t imm8,
              uint32_t* load_pc_offset = nullptr);
  void Pinsrq(XMMRegister dst, XMMRegister src1, Operand src2, uint8_t imm8,
              uint32_t* load_pc_offset = nullptr);

  // SIMD fused multiply-add/subtract helpers (Wasm qfma/qfms semantics);
  // |tmp| is a scratch XMM register.
  void F64x2Qfma(XMMRegister dst, XMMRegister src1, XMMRegister src2,
                 XMMRegister src3, XMMRegister tmp);
  void F64x2Qfms(XMMRegister dst, XMMRegister src1, XMMRegister src2,
                 XMMRegister src3, XMMRegister tmp);
  void F32x4Qfma(XMMRegister dst, XMMRegister src1, XMMRegister src2,
                 XMMRegister src3, XMMRegister tmp);
  void F32x4Qfms(XMMRegister dst, XMMRegister src1, XMMRegister src2,
                 XMMRegister src3, XMMRegister tmp);

  // Bit-counting helpers; the l/q suffixes select 32-/64-bit operand size.
  // NOTE(review): presumably these fall back to equivalent sequences when
  // LZCNT/TZCNT/POPCNT are unsupported — confirm in the .cc file.
  void Lzcntq(Register dst, Register src);
  void Lzcntq(Register dst, Operand src);
  void Lzcntl(Register dst, Register src);
  void Lzcntl(Register dst, Operand src);
  void Tzcntq(Register dst, Register src);
  void Tzcntq(Register dst, Operand src);
  void Tzcntl(Register dst, Register src);
  void Tzcntl(Register dst, Operand src);
  void Popcntl(Register dst, Register src);
  void Popcntl(Register dst, Operand src);
  void Popcntq(Register dst, Register src);
  void Popcntq(Register dst, Operand src);

  // Comparisons against Smi or 32-bit immediate values.
  void Cmp(Register dst, Smi src);
  void Cmp(Operand dst, Smi src);
  void Cmp(Register dst, int32_t src);

  // ---------------------------------------------------------------------------
  // Conversions between tagged smi values and non-tagged integer values.

  // Tag a word-size value. The result must be known to be a valid smi value.
  void SmiTag(Register reg);
  // Requires dst != src
  void SmiTag(Register dst, Register src);

  // Simple comparison of smis.  Both sides must be known smis to use these,
  // otherwise use Cmp.
  void SmiCompare(Register smi1, Register smi2);
  void SmiCompare(Register dst, Smi src);
  void SmiCompare(Register dst, Operand src);
  void SmiCompare(Operand dst, Register src);
  void SmiCompare(Operand dst, Smi src);

  // Functions performing a check on a known or potential smi. Returns
  // a condition that is satisfied if the check is successful.
  Condition CheckSmi(Register src);
  Condition CheckSmi(Operand src);

  // Abort execution if argument is a smi, enabled via --debug-code.
  void AssertNotSmi(Register object);

  // Abort execution if argument is not a smi, enabled via --debug-code.
  void AssertSmi(Register object);
  void AssertSmi(Operand object);

  // Test-and-jump functions. Typically combines a check function
  // above with a conditional jump.

  // Jump to label if the value is a tagged smi.
  void JumpIfSmi(Register src, Label* on_smi,
                 Label::Distance near_jump = Label::kFar);

  // Jump to label if the value is not a tagged smi.
  void JumpIfNotSmi(Register src, Label* on_not_smi,
                    Label::Distance near_jump = Label::kFar);

  // Jump to label if the value is not a tagged smi.
  void JumpIfNotSmi(Operand src, Label* on_not_smi,
                    Label::Distance near_jump = Label::kFar);

  // Operations on tagged smi values.

  // Smis represent a subset of integers. The subset is always equivalent to
  // a two's complement interpretation of a fixed number of bits.

  // Add an integer constant to a tagged smi, giving a tagged smi as result.
  // No overflow testing on the result is done.
  void SmiAddConstant(Operand dst, Smi constant);

  // Specialized operations

  // Converts, if necessary, a smi to a combination of number and
  // multiplier to be used as a scaled index.
  // The src register contains a *positive* smi value. The shift is the
  // power of two to multiply the index value by (e.g. to index by
  // smi-value * kSystemPointerSize, pass the smi and kSystemPointerSizeLog2).
  // The returned index register may be either src or dst, depending
  // on what is most efficient. If src and dst are different registers,
  // src is always unchanged.
  SmiIndex SmiToIndex(Register dst, Register src, int shift);

  // Compare |a| (32-bit) against immediate |b| and jump on equality.
  void JumpIfEqual(Register a, int32_t b, Label* dest) {
    cmpl(a, Immediate(b));
    j(equal, dest);
  }

  // Compare |a| (32-bit) against immediate |b| and jump if a < b (signed).
  void JumpIfLessThan(Register a, int32_t b, Label* dest) {
    cmpl(a, Immediate(b));
    j(less, dest);
  }

#ifdef V8_MAP_PACKING
  void UnpackMapWord(Register r);
#endif

  void LoadMap(Register destination, Register object);

  // Load an integer immediate, picking the shortest encoding that produces
  // the full 64-bit value in dst.
  void Move(Register dst, intptr_t x) {
    if (x == 0) {
      xorl(dst, dst);
      // The following shorter sequence for uint8 causes performance
      // regressions:
      // xorl(dst, dst); movb(dst,
      // Immediate(static_cast<uint32_t>(x)));
    } else if (is_uint32(x)) {
      // "movl" zero-extends into the upper 32 bits.
      movl(dst, Immediate(static_cast<uint32_t>(x)));
    } else if (is_int32(x)) {
      // "movq reg64, imm32" is sign extending.
      movq(dst, Immediate(static_cast<int32_t>(x)));
    } else {
      movq(dst, Immediate64(x));
    }
  }
  void Move(Operand dst, intptr_t x);
  void Move(Register dst, Smi source);

  // Stores a smi constant to memory; materializes it in a register first
  // (see GetSmiConstant for which register that may be).
  void Move(Operand dst, Smi source) {
    Register constant = GetSmiConstant(source);
    movq(dst, constant);
  }

  void Move(Register dst, TaggedIndex source) { Move(dst, source.ptr()); }

  void Move(Operand dst, TaggedIndex source) { Move(dst, source.ptr()); }

  void Move(Register dst, ExternalReference ext);

  // Load floating-point constants into XMM registers via their bit patterns.
  void Move(XMMRegister dst, uint32_t src);
  void Move(XMMRegister dst, uint64_t src);
  void Move(XMMRegister dst, float src) { Move(dst, bit_cast<uint32_t>(src)); }
  void Move(XMMRegister dst, double src) { Move(dst, bit_cast<uint64_t>(src)); }
  void Move(XMMRegister dst, uint64_t high, uint64_t low);

  // Move if the registers are not identical.
  void Move(Register target, Register source);
  void Move(XMMRegister target, XMMRegister source);

  void Move(Register target, Operand source);
  void Move(Register target, Immediate source);

  void Move(Register dst, Handle<HeapObject> source,
            RelocInfo::Mode rmode = RelocInfo::FULL_EMBEDDED_OBJECT);
  void Move(Operand dst, Handle<HeapObject> source,
            RelocInfo::Mode rmode = RelocInfo::FULL_EMBEDDED_OBJECT);

  // Loads a pointer into a register with a relocation mode.
  void Move(Register dst, Address ptr, RelocInfo::Mode rmode) {
    // This method must not be used with heap object references. The stored
    // address is not GC safe. Use the handle version instead.
    DCHECK(rmode == RelocInfo::NONE || rmode > RelocInfo::LAST_GCED_ENUM);
    movq(dst, Immediate64(ptr, rmode));
  }

  // Move src0 to dst0 and src1 to dst1, handling possible overlaps.
  void MovePair(Register dst0, Register src0, Register dst1, Register src1);

  void MoveStringConstant(
      Register result, const StringConstantBase* string,
      RelocInfo::Mode rmode = RelocInfo::FULL_EMBEDDED_OBJECT);

  // Convert smi to word-size sign-extended value.
  void SmiUntag(Register reg);
  // Requires dst != src
  void SmiUntag(Register dst, Register src);
  void SmiUntag(Register dst, Operand src);

  // Convert smi to 32-bit value.
  void SmiToInt32(Register reg);

  // Loads the address of the external reference into the destination
  // register.
  void LoadAddress(Register destination, ExternalReference source);

  void LoadFromConstantsTable(Register destination, int constant_index) final;
  void LoadRootRegisterOffset(Register destination, intptr_t offset) final;
  void LoadRootRelative(Register destination, int32_t offset) final;

  // Operand pointing to an external reference.
  // May emit code to set up the scratch register. The operand is
  // only guaranteed to be correct as long as the scratch register
  // isn't changed.
  // If the operand is used more than once, use a scratch register
  // that is guaranteed not to be clobbered.
  Operand ExternalReferenceAsOperand(ExternalReference reference,
                                     Register scratch = kScratchRegister);

  void Call(Register reg) { call(reg); }
  void Call(Operand op);
  void Call(Handle<Code> code_object, RelocInfo::Mode rmode);
  void Call(Address destination, RelocInfo::Mode rmode);
  void Call(ExternalReference ext);
  void Call(Label* target) { call(target); }

  // Builtin call/dispatch helpers.
  Operand EntryFromBuiltinAsOperand(Builtin builtin_index);
  Operand EntryFromBuiltinIndexAsOperand(Register builtin_index);
  void CallBuiltinByIndex(Register builtin_index);
  void CallBuiltin(Builtin builtin);
  void TailCallBuiltin(Builtin builtin);

  void LoadCodeObjectEntry(Register destination, Register code_object);
  void CallCodeObject(Register code_object);
  void JumpCodeObject(Register code_object,
                      JumpMode jump_mode = JumpMode::kJump);

  // Load code entry point from the CodeDataContainer object.
  void LoadCodeDataContainerEntry(Register destination,
                                  Register code_data_container_object);
  // Load code entry point from the CodeDataContainer object and compute
  // Code object pointer out of it. Must not be used for CodeDataContainers
  // corresponding to builtins, because their entry points values point to
  // the embedded instruction stream in .text section.
  void LoadCodeDataContainerCodeNonBuiltin(Register destination,
                                           Register code_data_container_object);
  void CallCodeDataContainerObject(Register code_data_container_object);
  void JumpCodeDataContainerObject(Register code_data_container_object,
                                   JumpMode jump_mode = JumpMode::kJump);

  // Helper functions that dispatch either to Call/JumpCodeObject or to
  // Call/JumpCodeDataContainerObject.
  void LoadCodeTEntry(Register destination, Register code);
  void CallCodeTObject(Register code);
  void JumpCodeTObject(Register code, JumpMode jump_mode = JumpMode::kJump);

  void Jump(Address destination, RelocInfo::Mode rmode);
  void Jump(const ExternalReference& reference);
  void Jump(Operand op);
  void Jump(Handle<Code> code_object, RelocInfo::Mode rmode,
            Condition cc = always);

  void CallForDeoptimization(Builtin target, int deopt_id, Label* exit,
                             DeoptimizeKind kind, Label* ret,
                             Label* jump_deoptimization_entry_label);

  void Trap();
  void DebugBreak();

  void CompareRoot(Register with, RootIndex index);
  void CompareRoot(Operand with, RootIndex index);

  // Generates function and stub prologue code.
  void StubPrologue(StackFrame::Type type);
  void Prologue();

  // Helpers for argument handling
  enum ArgumentsCountMode { kCountIncludesReceiver, kCountExcludesReceiver };
  enum ArgumentsCountType { kCountIsInteger, kCountIsSmi, kCountIsBytes };
  void DropArguments(Register count, Register scratch, ArgumentsCountType type,
                     ArgumentsCountMode mode);
  void DropArgumentsAndPushNewReceiver(Register argc, Register receiver,
                                       Register scratch,
                                       ArgumentsCountType type,
                                       ArgumentsCountMode mode);
  void DropArgumentsAndPushNewReceiver(Register argc, Operand receiver,
                                       Register scratch,
                                       ArgumentsCountType type,
                                       ArgumentsCountMode mode);

  // Calls Abort(msg) if the condition cc is not satisfied.
  // Use --debug_code to enable.
  void Assert(Condition cc, AbortReason reason);

  // Like Assert(), but without condition.
  // Use --debug_code to enable.
  void AssertUnreachable(AbortReason reason);

  // Abort execution if a 64 bit register containing a 32 bit payload does not
  // have zeros in the top 32 bits, enabled via --debug-code.
  void AssertZeroExtended(Register reg);

  // Like Assert(), but always enabled.
  void Check(Condition cc, AbortReason reason);

  // Print a message to stdout and abort execution.
  void Abort(AbortReason msg);

  // Check that the stack is aligned.
  void CheckStackAlignment();

  // Activation support.
  void EnterFrame(StackFrame::Type type);
  void EnterFrame(StackFrame::Type type, bool load_constant_pool_pointer_reg) {
    // Out-of-line constant pool not implemented on x64.
    UNREACHABLE();
  }
  void LeaveFrame(StackFrame::Type type);

// Allocate stack space of given size (i.e. decrement {rsp} by the value
// stored in the given register, or by a constant). If you need to perform a
// stack check, do it before calling this function because this function may
// write into the newly allocated space. It may also overwrite the given
// register's value, in the version that takes a register.
#if defined(V8_TARGET_OS_WIN) || defined(V8_TARGET_OS_MACOSX)
  // NOTE(review): presumably the out-of-line Win/Mac versions touch each
  // page of the allocated region (stack probing) — confirm in the .cc file.
  void AllocateStackSpace(Register bytes_scratch);
  void AllocateStackSpace(int bytes);
#else
  void AllocateStackSpace(Register bytes) { subq(rsp, bytes); }
  void AllocateStackSpace(int bytes) {
    DCHECK_GE(bytes, 0);
    if (bytes == 0) return;
    subq(rsp, Immediate(bytes));
  }
#endif

  // Points kRootRegister at the isolate root and, with pointer compression
  // in a shared cage, loads the cage base into kPtrComprCageBaseRegister.
  void InitializeRootRegister() {
    ExternalReference isolate_root = ExternalReference::isolate_root(isolate());
    Move(kRootRegister, isolate_root);
#ifdef V8_COMPRESS_POINTERS_IN_SHARED_CAGE
    LoadRootRelative(kPtrComprCageBaseRegister,
                     IsolateData::cage_base_offset());
#endif
  }

  void MaybeSaveRegisters(RegList registers);
  void MaybeRestoreRegisters(RegList registers);

  void CallEphemeronKeyBarrier(Register object, Register slot_address,
                               SaveFPRegsMode fp_mode);

  // Write-barrier stub calls; the *SaveRegisters variant preserves
  // caller-saved registers around the call.
  void CallRecordWriteStubSaveRegisters(
      Register object, Register slot_address,
      RememberedSetAction remembered_set_action, SaveFPRegsMode fp_mode,
      StubCallMode mode = StubCallMode::kCallBuiltinPointer);
  void CallRecordWriteStub(
      Register object, Register slot_address,
      RememberedSetAction remembered_set_action, SaveFPRegsMode fp_mode,
      StubCallMode mode = StubCallMode::kCallBuiltinPointer);

#ifdef V8_IS_TSAN
  // ThreadSanitizer instrumentation stubs for stores/relaxed loads.
  void CallTSANStoreStub(Register address, Register value,
                         SaveFPRegsMode fp_mode, int size, StubCallMode mode,
                         std::memory_order order);
  void CallTSANRelaxedLoadStub(Register address, SaveFPRegsMode fp_mode,
                               int size, StubCallMode mode);
#endif  // V8_IS_TSAN

  void MoveNumber(Register dst, double value);
  void MoveNonSmi(Register dst, double value);

  // Calculate how much stack space (in bytes) are required to store caller
  // registers excluding those specified in the arguments.
  int RequiredStackSizeForCallerSaved(SaveFPRegsMode fp_mode,
                                      Register exclusion1 = no_reg,
                                      Register exclusion2 = no_reg,
                                      Register exclusion3 = no_reg) const;

  // PushCallerSaved and PopCallerSaved do not arrange the registers in any
  // particular order so they are not useful for calls that can cause a GC.
  // The caller can exclude up to 3 registers that do not need to be saved and
  // restored.

  // Push caller saved registers on the stack, and return the number of bytes
  // stack pointer is adjusted.
  int PushCallerSaved(SaveFPRegsMode fp_mode, Register exclusion1 = no_reg,
                      Register exclusion2 = no_reg,
                      Register exclusion3 = no_reg);
  // Restore caller saved registers from the stack, and return the number of
  // bytes stack pointer is adjusted.
  int PopCallerSaved(SaveFPRegsMode fp_mode, Register exclusion1 = no_reg,
                     Register exclusion2 = no_reg,
                     Register exclusion3 = no_reg);

  // Compute the start of the generated instruction stream from the current PC.
  // This is an alternative to embedding the {CodeObject} handle as a reference.
  void ComputeCodeStartAddress(Register dst);

  // Control-flow integrity:

  // Define a function entrypoint. This doesn't emit any code for this
  // architecture, as control-flow integrity is not supported for it.
  void CodeEntry() {}
  // Define an exception handler.
  void ExceptionHandler() {}
  // Define an exception handler and bind a label.
  void BindExceptionHandler(Label* label) { bind(label); }

  // ---------------------------------------------------------------------------
  // Pointer compression support

  // Loads a field containing a HeapObject and decompresses it if pointer
  // compression is enabled.
  void LoadTaggedPointerField(Register destination, Operand field_operand);

  // Loads a field containing a Smi and decompresses it if pointer compression
  // is enabled.
  void LoadTaggedSignedField(Register destination, Operand field_operand);

  // Loads a field containing any tagged value and decompresses it if necessary.
  void LoadAnyTaggedField(Register destination, Operand field_operand);

  // Loads a field containing a HeapObject, decompresses it if necessary and
  // pushes full pointer to the stack. When pointer compression is enabled,
  // uses |scratch| to decompress the value.
  void PushTaggedPointerField(Operand field_operand, Register scratch);

  // Loads a field containing any tagged value, decompresses it if necessary and
  // pushes the full pointer to the stack. When pointer compression is enabled,
  // uses |scratch| to decompress the value.
  void PushTaggedAnyField(Operand field_operand, Register scratch);

  // Loads a field containing smi value and untags it.
  void SmiUntagField(Register dst, Operand src);

  // Compresses tagged value if necessary and stores it to given on-heap
  // location.
  void StoreTaggedField(Operand dst_field_operand, Immediate immediate);
  void StoreTaggedField(Operand dst_field_operand, Register value);
  void StoreTaggedSignedField(Operand dst_field_operand, Smi value);
  void AtomicStoreTaggedField(Operand dst_field_operand, Register value);

  // The following macros work even when pointer compression is not enabled.
  void DecompressTaggedSigned(Register destination, Operand field_operand);
  void DecompressTaggedPointer(Register destination, Operand field_operand);
  void DecompressTaggedPointer(Register destination, Register source);
  void DecompressAnyTagged(Register destination, Operand field_operand);

  // ---------------------------------------------------------------------------
  // V8 Heap sandbox support

  enum class IsolateRootLocation { kInScratchRegister, kInRootRegister };
  // Loads a field containing off-heap pointer and does necessary decoding
  // if V8 heap sandbox is enabled.
  void LoadExternalPointerField(Register destination, Operand field_operand,
                                ExternalPointerTag tag, Register scratch,
                                IsolateRootLocation isolateRootLocation =
                                    IsolateRootLocation::kInRootRegister);

 protected:
  // Number of bits a value is shifted left when tagged as a smi.
  static const int kSmiShift = kSmiTagSize + kSmiShiftSize;

  // Returns a register holding the smi value. The register MUST NOT be
  // modified. It may be the "smi 1 constant" register.
  Register GetSmiConstant(Smi value);

  // Drops arguments assuming that the return address was already popped.
  void DropArguments(Register count, ArgumentsCountType type = kCountIsInteger,
                     ArgumentsCountMode mode = kCountExcludesReceiver);
};
626 
627 // MacroAssembler implements a collection of frequently used macros.
628 class V8_EXPORT_PRIVATE MacroAssembler : public TurboAssembler {
629  public:
630   using TurboAssembler::TurboAssembler;
631 
632   // Loads and stores the value of an external reference.
633   // Special case code for load and store to take advantage of
634   // load_rax/store_rax if possible/necessary.
635   // For other operations, just use:
636   //   Operand operand = ExternalReferenceAsOperand(extref);
637   //   operation(operand, ..);
638   void Load(Register destination, ExternalReference source);
639   void Store(ExternalReference destination, Register source);
640 
641   // Pushes the address of the external reference onto the stack.
642   void PushAddress(ExternalReference source);
643 
644   // Operations on roots in the root-array.
645   // Load a root value where the index (or part of it) is variable.
646   // The variable_offset register is added to the fixed_offset value
647   // to get the index into the root-array.
648   void PushRoot(RootIndex index);
649 
650   // Compare the object in a register to a value and jump if they are equal.
651   void JumpIfRoot(Register with, RootIndex index, Label* if_equal,
652                   Label::Distance if_equal_distance = Label::kFar) {
653     CompareRoot(with, index);
654     j(equal, if_equal, if_equal_distance);
655   }
656   void JumpIfRoot(Operand with, RootIndex index, Label* if_equal,
657                   Label::Distance if_equal_distance = Label::kFar) {
658     CompareRoot(with, index);
659     j(equal, if_equal, if_equal_distance);
660   }
661 
662   // Compare the object in a register to a value and jump if they are not equal.
663   void JumpIfNotRoot(Register with, RootIndex index, Label* if_not_equal,
664                      Label::Distance if_not_equal_distance = Label::kFar) {
665     CompareRoot(with, index);
666     j(not_equal, if_not_equal, if_not_equal_distance);
667   }
668   void JumpIfNotRoot(Operand with, RootIndex index, Label* if_not_equal,
669                      Label::Distance if_not_equal_distance = Label::kFar) {
670     CompareRoot(with, index);
671     j(not_equal, if_not_equal, if_not_equal_distance);
672   }
673 
674   // ---------------------------------------------------------------------------
675   // GC Support
676 
677   // Notify the garbage collector that we wrote a pointer into an object.
678   // |object| is the object being stored into, |value| is the object being
679   // stored.  value and scratch registers are clobbered by the operation.
680   // The offset is the offset from the start of the object, not the offset from
681   // the tagged HeapObject pointer.  For use with FieldOperand(reg, off).
682   void RecordWriteField(
683       Register object, int offset, Register value, Register slot_address,
684       SaveFPRegsMode save_fp,
685       RememberedSetAction remembered_set_action = RememberedSetAction::kEmit,
686       SmiCheck smi_check = SmiCheck::kInline);
687 
688   // For page containing |object| mark region covering |address|
689   // dirty. |object| is the object being stored into, |value| is the
690   // object being stored. The address and value registers are clobbered by the
691   // operation.  RecordWrite filters out smis so it does not update
692   // the write barrier if the value is a smi.
693   void RecordWrite(
694       Register object, Register slot_address, Register value,
695       SaveFPRegsMode save_fp,
696       RememberedSetAction remembered_set_action = RememberedSetAction::kEmit,
697       SmiCheck smi_check = SmiCheck::kInline);
698 
699   // Enter specific kind of exit frame; either in normal or
700   // debug mode. Expects the number of arguments in register rax and
701   // sets up the number of arguments in register rdi and the pointer
702   // to the first argument in register rsi.
703   //
704   // Allocates arg_stack_space * kSystemPointerSize memory (not GCed) on the
705   // stack accessible via StackSpaceOperand.
706   void EnterExitFrame(int arg_stack_space = 0, bool save_doubles = false,
707                       StackFrame::Type frame_type = StackFrame::EXIT);
708 
709   // Enter specific kind of exit frame. Allocates
710   // (arg_stack_space * kSystemPointerSize) memory (not GCed) on the stack
711   // accessible via StackSpaceOperand.
712   void EnterApiExitFrame(int arg_stack_space);
713 
714   // Leave the current exit frame. Expects/provides the return value in
715   // register rax:rdx (untouched) and the pointer to the first
716   // argument in register rsi (if pop_arguments == true).
717   void LeaveExitFrame(bool save_doubles = false, bool pop_arguments = true);
718 
719   // Leave the current exit frame. Expects/provides the return value in
720   // register rax (untouched).
721   void LeaveApiExitFrame();
722 
723   // ---------------------------------------------------------------------------
724   // JavaScript invokes
725 
726   // Invoke the JavaScript function code by either calling or jumping.
727   void InvokeFunctionCode(Register function, Register new_target,
728                           Register expected_parameter_count,
729                           Register actual_parameter_count, InvokeType type);
730 
731   // On function call, call into the debugger.
732   void CallDebugOnFunctionCall(Register fun, Register new_target,
733                                Register expected_parameter_count,
734                                Register actual_parameter_count);
735 
736   // Invoke the JavaScript function in the given register. Changes the
737   // current context to the context in the function before invoking.
738   void InvokeFunction(Register function, Register new_target,
739                       Register actual_parameter_count, InvokeType type);
740 
741   void InvokeFunction(Register function, Register new_target,
742                       Register expected_parameter_count,
743                       Register actual_parameter_count, InvokeType type);
744 
745   // ---------------------------------------------------------------------------
746   // Macro instructions.
747 
748   using TurboAssembler::Cmp;
749   void Cmp(Register dst, Handle<Object> source);
750   void Cmp(Operand dst, Handle<Object> source);
751 
752   // Checks if value is in range [lower_limit, higher_limit] using a single
753   // comparison. Flags CF=1 or ZF=1 indicate the value is in the range
754   // (condition below_equal).
755   void CompareRange(Register value, unsigned lower_limit,
756                     unsigned higher_limit);
757   void JumpIfIsInRange(Register value, unsigned lower_limit,
758                        unsigned higher_limit, Label* on_in_range,
759                        Label::Distance near_jump = Label::kFar);
760 
761   // Emit code to discard a non-negative number of pointer-sized elements
762   // from the stack, clobbering only the rsp register.
763   void Drop(int stack_elements);
764   // Emit code to discard a positive number of pointer-sized elements
765   // from the stack under the return address which remains on the top,
766   // clobbering the rsp register.
767   void DropUnderReturnAddress(int stack_elements,
768                               Register scratch = kScratchRegister);
769   void PushQuad(Operand src);
770   void PushImm32(int32_t imm32);
771   void Pop(Register dst);
772   void Pop(Operand dst);
773   void PopQuad(Operand dst);
774 
775   // Generates a trampoline to jump to the off-heap instruction stream.
776   void JumpToInstructionStream(Address entry);
777 
778   // Compare object type for heap object.
779   // Always use unsigned comparisons: above and below, not less and greater.
780   // Incoming register is heap_object and outgoing register is map.
781   // They may be the same register, and may be kScratchRegister.
782   void CmpObjectType(Register heap_object, InstanceType type, Register map);
783 
784   // Compare instance type for map.
785   // Always use unsigned comparisons: above and below, not less and greater.
786   void CmpInstanceType(Register map, InstanceType type);
787 
788   // Compare instance type ranges for a map (low and high inclusive)
789   // Always use unsigned comparisons: below_equal for a positive result.
790   void CmpInstanceTypeRange(Register map, Register instance_type_out,
791                             InstanceType low, InstanceType high);
792 
793   template <typename Field>
DecodeField(Register reg)794   void DecodeField(Register reg) {
795     static const int shift = Field::kShift;
796     static const int mask = Field::kMask >> Field::kShift;
797     if (shift != 0) {
798       shrq(reg, Immediate(shift));
799     }
800     andq(reg, Immediate(mask));
801   }
802 
803   // Abort execution if argument is not a CodeT, enabled via --debug-code.
804   void AssertCodeT(Register object);
805 
806   // Abort execution if argument is not a Constructor, enabled via --debug-code.
807   void AssertConstructor(Register object);
808 
809   // Abort execution if argument is not a JSFunction, enabled via --debug-code.
810   void AssertFunction(Register object);
811 
812   // Abort execution if argument is not a JSBoundFunction,
813   // enabled via --debug-code.
814   void AssertBoundFunction(Register object);
815 
816   // Abort execution if argument is not a JSGeneratorObject (or subclass),
817   // enabled via --debug-code.
818   void AssertGeneratorObject(Register object);
819 
820   // Abort execution if argument is not undefined or an AllocationSite, enabled
821   // via --debug-code.
822   void AssertUndefinedOrAllocationSite(Register object);
823 
824   // ---------------------------------------------------------------------------
825   // Exception handling
826 
827   // Push a new stack handler and link it into stack handler chain.
828   void PushStackHandler();
829 
830   // Unlink the stack handler on top of the stack from the stack handler chain.
831   void PopStackHandler();
832 
833   // ---------------------------------------------------------------------------
834   // Support functions.
835 
  // Load the global proxy from the current context.
  void LoadGlobalProxy(Register dst) {
    // The global proxy is stored in a fixed slot of the native context.
    LoadNativeContextSlot(dst, Context::GLOBAL_PROXY_INDEX);
  }
840 
841   // Load the native context slot with the current index.
842   void LoadNativeContextSlot(Register dst, int index);
843 
844   // ---------------------------------------------------------------------------
845   // Runtime calls
846 
847   // Call a runtime routine.
848   void CallRuntime(const Runtime::Function* f, int num_arguments,
849                    SaveFPRegsMode save_doubles = SaveFPRegsMode::kIgnore);
850 
  // Convenience function: Same as above, but takes the fid instead.
  // The argument count is taken from the runtime function's descriptor
  // (function->nargs).
  void CallRuntime(Runtime::FunctionId fid,
                   SaveFPRegsMode save_doubles = SaveFPRegsMode::kIgnore) {
    const Runtime::Function* function = Runtime::FunctionForId(fid);
    CallRuntime(function, function->nargs, save_doubles);
  }
857 
  // Convenience function: Same as above, but takes the fid instead.
  // Use this overload when the argument count differs from (or is not
  // encoded in) the runtime function's descriptor.
  void CallRuntime(Runtime::FunctionId fid, int num_arguments,
                   SaveFPRegsMode save_doubles = SaveFPRegsMode::kIgnore) {
    CallRuntime(Runtime::FunctionForId(fid), num_arguments, save_doubles);
  }
863 
864   // Convenience function: tail call a runtime routine (jump)
865   void TailCallRuntime(Runtime::FunctionId fid);
866 
867   // Jump to a runtime routines
868   void JumpToExternalReference(const ExternalReference& ext,
869                                bool builtin_exit_frame = false);
870 
871   // ---------------------------------------------------------------------------
872   // StatsCounter support
IncrementCounter(StatsCounter * counter,int value)873   void IncrementCounter(StatsCounter* counter, int value) {
874     if (!FLAG_native_code_counters) return;
875     EmitIncrementCounter(counter, value);
876   }
877   void EmitIncrementCounter(StatsCounter* counter, int value);
DecrementCounter(StatsCounter * counter,int value)878   void DecrementCounter(StatsCounter* counter, int value) {
879     if (!FLAG_native_code_counters) return;
880     EmitDecrementCounter(counter, value);
881   }
882   void EmitDecrementCounter(StatsCounter* counter, int value);
883 
884   // ---------------------------------------------------------------------------
885   // Stack limit utilities
886   Operand StackLimitAsOperand(StackLimitKind kind);
887   void StackOverflowCheck(
888       Register num_args, Label* stack_overflow,
889       Label::Distance stack_overflow_distance = Label::kFar);
890 
891   // ---------------------------------------------------------------------------
892   // In-place weak references.
893   void LoadWeakValue(Register in_out, Label* target_if_cleared);
894 
895  private:
896   // Helper functions for generating invokes.
897   void InvokePrologue(Register expected_parameter_count,
898                       Register actual_parameter_count, Label* done,
899                       InvokeType type);
900 
901   void EnterExitFramePrologue(Register saved_rax_reg,
902                               StackFrame::Type frame_type);
903 
904   // Allocates arg_stack_space * kSystemPointerSize memory (not GCed) on the
905   // stack accessible via StackSpaceOperand.
906   void EnterExitFrameEpilogue(int arg_stack_space, bool save_doubles);
907 
908   void LeaveExitFrameEpilogue();
909 
910   DISALLOW_IMPLICIT_CONSTRUCTORS(MacroAssembler);
911 };
912 
913 // -----------------------------------------------------------------------------
914 // Static helper functions.
915 
// Generate an Operand for loading a field from an object.
// |offset| is an untagged byte offset from the start of the object; the
// heap-object tag is subtracted so the operand addresses the raw field
// through the tagged pointer in |object|.
inline Operand FieldOperand(Register object, int offset) {
  return Operand(object, offset - kHeapObjectTag);
}
920 
// Generate an Operand for loading an indexed field from an object.
// Addresses object + index * scale + offset - tag, i.e. element |index|
// (scaled) within the field at |offset| of the tagged pointer in |object|.
inline Operand FieldOperand(Register object, Register index, ScaleFactor scale,
                            int offset) {
  return Operand(object, index, scale, offset - kHeapObjectTag);
}
926 
927 // Provides access to exit frame stack space (not GCed).
StackSpaceOperand(int index)928 inline Operand StackSpaceOperand(int index) {
929 #ifdef V8_TARGET_OS_WIN
930   const int kShaddowSpace = 4;
931   return Operand(rsp, (index + kShaddowSpace) * kSystemPointerSize);
932 #else
933   return Operand(rsp, index * kSystemPointerSize);
934 #endif
935 }
936 
StackOperandForReturnAddress(int32_t disp)937 inline Operand StackOperandForReturnAddress(int32_t disp) {
938   return Operand(rsp, disp);
939 }
940 
941 #define ACCESS_MASM(masm) masm->
942 
943 }  // namespace internal
944 }  // namespace v8
945 
946 #endif  // V8_CODEGEN_X64_MACRO_ASSEMBLER_X64_H_
947