// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_INTERPRETER_INTERPRETER_ASSEMBLER_H_
#define V8_INTERPRETER_INTERPRETER_ASSEMBLER_H_

#include "src/allocation.h"
#include "src/builtins/builtins.h"
#include "src/code-stub-assembler.h"
#include "src/globals.h"
#include "src/interpreter/bytecode-register.h"
#include "src/interpreter/bytecodes.h"
#include "src/runtime/runtime.h"

namespace v8 {
namespace internal {
namespace interpreter {

class V8_EXPORT_PRIVATE InterpreterAssembler : public CodeStubAssembler {
 public:
  InterpreterAssembler(compiler::CodeAssemblerState* state, Bytecode bytecode,
                       OperandScale operand_scale);
  ~InterpreterAssembler();

  // Returns the 32-bit unsigned count immediate for bytecode operand
  // |operand_index| in the current bytecode.
  compiler::Node* BytecodeOperandCount(int operand_index);
  // Returns the 32-bit unsigned flag for bytecode operand |operand_index|
  // in the current bytecode.
  compiler::Node* BytecodeOperandFlag(int operand_index);
  // Returns the 32-bit zero-extended index immediate for bytecode operand
  // |operand_index| in the current bytecode.
  compiler::Node* BytecodeOperandIdxInt32(int operand_index);
  // Returns the word zero-extended index immediate for bytecode operand
  // |operand_index| in the current bytecode.
  compiler::Node* BytecodeOperandIdx(int operand_index);
  // Returns the smi index immediate for bytecode operand |operand_index|
  // in the current bytecode.
  compiler::Node* BytecodeOperandIdxSmi(int operand_index);
  // Returns the 32-bit unsigned immediate for bytecode operand |operand_index|
  // in the current bytecode.
  compiler::Node* BytecodeOperandUImm(int operand_index);
  // Returns the word-size unsigned immediate for bytecode operand
  // |operand_index| in the current bytecode.
  compiler::Node* BytecodeOperandUImmWord(int operand_index);
  // Returns the unsigned smi immediate for bytecode operand |operand_index| in
  // the current bytecode.
  compiler::Node* BytecodeOperandUImmSmi(int operand_index);
  // Returns the 32-bit signed immediate for bytecode operand |operand_index|
  // in the current bytecode.
  compiler::Node* BytecodeOperandImm(int operand_index);
  // Returns the word-size signed immediate for bytecode operand |operand_index|
  // in the current bytecode.
  compiler::Node* BytecodeOperandImmIntPtr(int operand_index);
  // Returns the smi immediate for bytecode operand |operand_index| in the
  // current bytecode.
  compiler::Node* BytecodeOperandImmSmi(int operand_index);
  // Returns the 32-bit unsigned runtime id immediate for bytecode operand
  // |operand_index| in the current bytecode.
  compiler::Node* BytecodeOperandRuntimeId(int operand_index);
  // Returns the 32-bit unsigned native context index immediate for bytecode
  // operand |operand_index| in the current bytecode.
  compiler::Node* BytecodeOperandNativeContextIndex(int operand_index);
  // Returns the 32-bit unsigned intrinsic id immediate for bytecode operand
  // |operand_index| in the current bytecode.
  compiler::Node* BytecodeOperandIntrinsicId(int operand_index);
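
  // Example: a minimal sketch of a handler body that loads a signed Smi
  // immediate from operand 0 into the accumulator and then dispatches. The
  // bytecode name and operand layout are illustrative only; handlers are
  // normally generated elsewhere (e.g. interpreter-generator.cc):
  //
  //   Node* imm = BytecodeOperandImmSmi(0);  // operand 0, tagged as a Smi
  //   SetAccumulator(imm);                   // result lives in the accumulator
  //   Dispatch();                            // continue with the next bytecode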

  // Accumulator.
  compiler::Node* GetAccumulator();
  void SetAccumulator(compiler::Node* value);

  // Context.
  compiler::Node* GetContext();
  void SetContext(compiler::Node* value);

  // Context at |depth| in the context chain starting at |context|.
  compiler::Node* GetContextAtDepth(compiler::Node* context,
                                    compiler::Node* depth);

  // Goto the given |target| if the context chain starting at |context| has any
  // extensions up to the given |depth|.
  void GotoIfHasContextExtensionUpToDepth(compiler::Node* context,
                                          compiler::Node* depth, Label* target);
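
  // Example: a rough sketch of how a context-slot load might walk the context
  // chain with GetContextAtDepth. The operand layout is illustrative, and
  // LoadContextElement is assumed to come from the CodeStubAssembler base:
  //
  //   Node* context = LoadRegisterAtOperandIndex(0);  // context register
  //   Node* slot_index = BytecodeOperandIdx(1);       // slot within context
  //   Node* depth = BytecodeOperandUImm(2);           // hops up the chain
  //   Node* slot_context = GetContextAtDepth(context, depth);
  //   SetAccumulator(LoadContextElement(slot_context, slot_index));
  //   Dispatch();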

  // A RegListNodePair provides an abstraction over lists of registers.
  class RegListNodePair {
   public:
    RegListNodePair(Node* base_reg_location, Node* reg_count)
        : base_reg_location_(base_reg_location), reg_count_(reg_count) {}

    compiler::Node* reg_count() const { return reg_count_; }
    compiler::Node* base_reg_location() const { return base_reg_location_; }

   private:
    compiler::Node* base_reg_location_;
    compiler::Node* reg_count_;
  };

  // Backup/restore register file to/from a fixed array of the correct length.
  compiler::Node* ExportRegisterFile(compiler::Node* array,
                                     const RegListNodePair& registers);
  compiler::Node* ImportRegisterFile(compiler::Node* array,
                                     const RegListNodePair& registers);

  // Loads from and stores to the interpreter register file.
  compiler::Node* LoadRegister(Register reg);
  compiler::Node* LoadAndUntagRegister(Register reg);
  compiler::Node* LoadRegisterAtOperandIndex(int operand_index);
  std::pair<compiler::Node*, compiler::Node*> LoadRegisterPairAtOperandIndex(
      int operand_index);
  void StoreRegister(compiler::Node* value, Register reg);
  void StoreAndTagRegister(compiler::Node* value, Register reg);
  void StoreRegisterAtOperandIndex(compiler::Node* value, int operand_index);
  void StoreRegisterPairAtOperandIndex(compiler::Node* value1,
                                       compiler::Node* value2,
                                       int operand_index);
  void StoreRegisterTripleAtOperandIndex(compiler::Node* value1,
                                         compiler::Node* value2,
                                         compiler::Node* value3,
                                         int operand_index);

  RegListNodePair GetRegisterListAtOperandIndex(int operand_index);
  Node* LoadRegisterFromRegisterList(const RegListNodePair& reg_list,
                                     int index);
  Node* RegisterLocationInRegisterList(const RegListNodePair& reg_list,
                                       int index);
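
  // Example: a minimal sketch of a register-to-register move built on the
  // operand-indexed accessors above (operand layout is illustrative):
  //
  //   Node* value = LoadRegisterAtOperandIndex(0);  // source register
  //   StoreRegisterAtOperandIndex(value, 1);        // destination register
  //   Dispatch();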

  // Load constant at the index specified in operand |operand_index| from the
  // constant pool.
  compiler::Node* LoadConstantPoolEntryAtOperandIndex(int operand_index);
  // Load and untag constant at the index specified in operand |operand_index|
  // from the constant pool.
  compiler::Node* LoadAndUntagConstantPoolEntryAtOperandIndex(
      int operand_index);
  // Load constant at |index| in the constant pool.
  compiler::Node* LoadConstantPoolEntry(compiler::Node* index);
  // Load and untag constant at |index| in the constant pool.
  compiler::Node* LoadAndUntagConstantPoolEntry(compiler::Node* index);
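
  // Example: a minimal sketch of a handler that loads the constant-pool entry
  // referenced by operand 0 into the accumulator (illustrative layout):
  //
  //   Node* constant = LoadConstantPoolEntryAtOperandIndex(0);
  //   SetAccumulator(constant);
  //   Dispatch();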

  // Load the FeedbackVector for the current function.
  compiler::Node* LoadFeedbackVector();

  // Increment the call count for a CALL_IC or construct call.
  // The call count is located at feedback_vector[slot_id + 1].
  void IncrementCallCount(compiler::Node* feedback_vector,
                          compiler::Node* slot_id);

  // Collect the callable |target| feedback for either a CALL_IC or
  // an INSTANCEOF_IC in the |feedback_vector| at |slot_id|.
  void CollectCallableFeedback(compiler::Node* target, compiler::Node* context,
                               compiler::Node* feedback_vector,
                               compiler::Node* slot_id);

  // Collect CALL_IC feedback for |target| function in the
  // |feedback_vector| at |slot_id|, and the call counts in
  // the |feedback_vector| at |slot_id+1|.
  void CollectCallFeedback(compiler::Node* target, compiler::Node* context,
                           compiler::Node* feedback_vector,
                           compiler::Node* slot_id);

  // Call JSFunction or Callable |function| with |args| arguments, possibly
  // including the receiver depending on |receiver_mode|. After the call
  // returns, directly dispatches to the next bytecode.
  void CallJSAndDispatch(compiler::Node* function, compiler::Node* context,
                         const RegListNodePair& args,
                         ConvertReceiverMode receiver_mode);
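
  // Example: a rough sketch of a generic call handler that combines the
  // feedback collection and dispatch helpers above. The operand layout and
  // receiver mode are illustrative, not prescribed by this header:
  //
  //   Node* function = LoadRegisterAtOperandIndex(0);
  //   RegListNodePair args = GetRegisterListAtOperandIndex(1);
  //   Node* slot_id = BytecodeOperandIdx(3);
  //   Node* feedback_vector = LoadFeedbackVector();
  //   Node* context = GetContext();
  //   CollectCallFeedback(function, context, feedback_vector, slot_id);
  //   CallJSAndDispatch(function, context, args, ConvertReceiverMode::kAny);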

  // Call JSFunction or Callable |function| with |arg_count| arguments (not
  // including receiver) passed as |args|, possibly including the receiver
  // depending on |receiver_mode|. After the call returns, directly dispatches
  // to the next bytecode.
  template <class... TArgs>
  void CallJSAndDispatch(Node* function, Node* context, Node* arg_count,
                         ConvertReceiverMode receiver_mode, TArgs... args);

  // Call JSFunction or Callable |function| with |args| arguments (not
  // including receiver), where the final argument is a spread. After the call
  // returns, directly dispatches to the next bytecode.
  void CallJSWithSpreadAndDispatch(compiler::Node* function,
                                   compiler::Node* context,
                                   const RegListNodePair& args,
                                   compiler::Node* slot_id,
                                   compiler::Node* feedback_vector);

  // Call constructor |target| with |args| arguments (not including receiver).
  // The |new_target| is the same as the |target| for the new keyword, but
  // differs for the super keyword.
  compiler::Node* Construct(compiler::Node* target, compiler::Node* context,
                            compiler::Node* new_target,
                            const RegListNodePair& args,
                            compiler::Node* slot_id,
                            compiler::Node* feedback_vector);
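
  // Example: a rough sketch of a construct handler. Passing new.target in the
  // accumulator and this operand layout are illustrative assumptions:
  //
  //   Node* new_target = GetAccumulator();
  //   Node* constructor = LoadRegisterAtOperandIndex(0);
  //   RegListNodePair args = GetRegisterListAtOperandIndex(1);
  //   Node* slot_id = BytecodeOperandIdx(3);
  //   Node* feedback_vector = LoadFeedbackVector();
  //   Node* context = GetContext();
  //   Node* result = Construct(constructor, context, new_target, args,
  //                            slot_id, feedback_vector);
  //   SetAccumulator(result);
  //   Dispatch();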

  // Call constructor |target| with |args| arguments (not including
  // receiver). The last argument is always a spread. The |new_target| is the
  // same as the |target| for the new keyword, but differs for the super
  // keyword.
  compiler::Node* ConstructWithSpread(compiler::Node* target,
                                      compiler::Node* context,
                                      compiler::Node* new_target,
                                      const RegListNodePair& args,
                                      compiler::Node* slot_id,
                                      compiler::Node* feedback_vector);

  // Call the runtime function |function_id| with |args| arguments, which will
  // return |return_size| values.
  compiler::Node* CallRuntimeN(compiler::Node* function_id,
                               compiler::Node* context,
                               const RegListNodePair& args,
                               int return_size = 1);
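
  // Example: a minimal sketch of a runtime-call handler built on CallRuntimeN
  // (operand layout is illustrative; the default single return value is used):
  //
  //   Node* function_id = BytecodeOperandRuntimeId(0);
  //   RegListNodePair args = GetRegisterListAtOperandIndex(1);
  //   Node* context = GetContext();
  //   Node* result = CallRuntimeN(function_id, context, args);
  //   SetAccumulator(result);
  //   Dispatch();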

  // Jump forward relative to the current bytecode by the |jump_offset|.
  compiler::Node* Jump(compiler::Node* jump_offset);

  // Jump backward relative to the current bytecode by the |jump_offset|.
  compiler::Node* JumpBackward(compiler::Node* jump_offset);

  // Jump forward relative to the current bytecode by |jump_offset| if the
  // word values |lhs| and |rhs| are equal.
  void JumpIfWordEqual(compiler::Node* lhs, compiler::Node* rhs,
                       compiler::Node* jump_offset);

  // Jump forward relative to the current bytecode by |jump_offset| if the
  // word values |lhs| and |rhs| are not equal.
  void JumpIfWordNotEqual(compiler::Node* lhs, compiler::Node* rhs,
                          compiler::Node* jump_offset);
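
  // Example: a rough sketch of a conditional jump handler that compares the
  // accumulator against the true value. The jump operand index is
  // illustrative, and TrueConstant() is assumed from the assembler base:
  //
  //   Node* accumulator = GetAccumulator();
  //   Node* relative_jump = BytecodeOperandUImmWord(0);
  //   JumpIfWordEqual(accumulator, TrueConstant(), relative_jump);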

  // Updates the profiler interrupt budget for a return.
  void UpdateInterruptBudgetOnReturn();

  // Returns the OSR nesting level from the bytecode header.
  compiler::Node* LoadOSRNestingLevel();

  // Dispatch to the bytecode.
  compiler::Node* Dispatch();

  // Dispatch bytecode as wide operand variant.
  void DispatchWide(OperandScale operand_scale);

  // Dispatch to |target_bytecode| at |new_bytecode_offset|.
  // |target_bytecode| should be equivalent to loading from the offset.
  compiler::Node* DispatchToBytecode(compiler::Node* target_bytecode,
                                     compiler::Node* new_bytecode_offset);

  // Abort with the given abort reason.
  void Abort(AbortReason abort_reason);
  void AbortIfWordNotEqual(compiler::Node* lhs, compiler::Node* rhs,
                           AbortReason abort_reason);
  // Abort if |register_count| is invalid for given register file array.
  void AbortIfRegisterCountInvalid(compiler::Node* register_file,
                                   compiler::Node* register_count);

  // Dispatch to frame dropper trampoline if necessary.
  void MaybeDropFrames(compiler::Node* context);

  // Returns the offset from the BytecodeArrayPointer of the current bytecode.
  compiler::Node* BytecodeOffset();

 protected:
  Bytecode bytecode() const { return bytecode_; }
  static bool TargetSupportsUnalignedAccess();

  void ToNumberOrNumeric(Object::Conversion mode);

  // Lazily deserializes the current bytecode's handler and tail-calls into it.
  void DeserializeLazyAndDispatch();

 private:
  // Returns a tagged pointer to the current function's BytecodeArray object.
  compiler::Node* BytecodeArrayTaggedPointer();

  // Returns a raw pointer to first entry in the interpreter dispatch table.
  compiler::Node* DispatchTableRawPointer();

  // Returns the accumulator value without checking whether bytecode
  // uses it. This is intended to be used only in dispatch and in
  // tracing as these need to bypass accumulator use validity checks.
  compiler::Node* GetAccumulatorUnchecked();

  // Returns the frame pointer for the interpreted frame of the function being
  // interpreted.
  compiler::Node* GetInterpretedFramePointer();

  // Operations on registers.
  compiler::Node* RegisterLocation(Register reg);
  compiler::Node* RegisterLocation(compiler::Node* reg_index);
  compiler::Node* NextRegister(compiler::Node* reg_index);
  compiler::Node* LoadRegister(Node* reg_index);
  void StoreRegister(compiler::Node* value, compiler::Node* reg_index);

  // Saves and restores interpreter bytecode offset to the interpreter stack
  // frame when performing a call.
  void CallPrologue();
  void CallEpilogue();

  // Increment the dispatch counter for the (current, next) bytecode pair.
  void TraceBytecodeDispatch(compiler::Node* target_index);

  // Traces the current bytecode by calling |function_id|.
  void TraceBytecode(Runtime::FunctionId function_id);

  // Updates the bytecode array's interrupt budget by a 32-bit unsigned |weight|
  // and calls Runtime::kInterrupt if counter reaches zero. If |backward|, then
  // the interrupt budget is decremented, otherwise it is incremented.
  void UpdateInterruptBudget(compiler::Node* weight, bool backward);

  // Returns the offset of register |index| relative to RegisterFilePointer().
  compiler::Node* RegisterFrameOffset(compiler::Node* index);

  // Returns the offset of an operand relative to the current bytecode offset.
  compiler::Node* OperandOffset(int operand_index);

  // Returns a value built from a sequence of bytes in the bytecode
  // array starting at |relative_offset| from the current bytecode.
  // The |result_type| determines the size and signedness of the
  // value read. This method should only be used on architectures that
  // do not support unaligned memory accesses.
  compiler::Node* BytecodeOperandReadUnaligned(
      int relative_offset, MachineType result_type,
      LoadSensitivity needs_poisoning = LoadSensitivity::kCritical);

  // Returns zero- or sign-extended to word32 value of the operand.
  compiler::Node* BytecodeOperandUnsignedByte(
      int operand_index,
      LoadSensitivity needs_poisoning = LoadSensitivity::kCritical);
  compiler::Node* BytecodeOperandSignedByte(
      int operand_index,
      LoadSensitivity needs_poisoning = LoadSensitivity::kCritical);
  compiler::Node* BytecodeOperandUnsignedShort(
      int operand_index,
      LoadSensitivity needs_poisoning = LoadSensitivity::kCritical);
  compiler::Node* BytecodeOperandSignedShort(
      int operand_index,
      LoadSensitivity needs_poisoning = LoadSensitivity::kCritical);
  compiler::Node* BytecodeOperandUnsignedQuad(
      int operand_index,
      LoadSensitivity needs_poisoning = LoadSensitivity::kCritical);
  compiler::Node* BytecodeOperandSignedQuad(
      int operand_index,
      LoadSensitivity needs_poisoning = LoadSensitivity::kCritical);

  // Returns zero- or sign-extended to word32 value of the operand of
  // given size.
  compiler::Node* BytecodeSignedOperand(
      int operand_index, OperandSize operand_size,
      LoadSensitivity needs_poisoning = LoadSensitivity::kCritical);
  compiler::Node* BytecodeUnsignedOperand(
      int operand_index, OperandSize operand_size,
      LoadSensitivity needs_poisoning = LoadSensitivity::kCritical);

  // Returns the word-size sign-extended register index for bytecode operand
  // |operand_index| in the current bytecode. Value is not poisoned on
  // speculation since the value loaded from the register is poisoned instead.
  compiler::Node* BytecodeOperandReg(
      int operand_index,
      LoadSensitivity needs_poisoning = LoadSensitivity::kCritical);

  // Returns the word zero-extended index immediate for bytecode operand
  // |operand_index| in the current bytecode for use when loading a
  // constant pool element.
  compiler::Node* BytecodeOperandConstantPoolIdx(
      int operand_index,
      LoadSensitivity needs_poisoning = LoadSensitivity::kCritical);

  // Jump relative to the current bytecode by the |jump_offset|. If |backward|,
  // then jump backward (subtract the offset), otherwise jump forward (add the
  // offset). Helper function for Jump and JumpBackward.
  compiler::Node* Jump(compiler::Node* jump_offset, bool backward);

  // Jump forward relative to the current bytecode by |jump_offset| if the
  // |condition| is true. Helper function for JumpIfWordEqual and
  // JumpIfWordNotEqual.
  void JumpConditional(compiler::Node* condition, compiler::Node* jump_offset);

  // Save the bytecode offset to the interpreter frame.
  void SaveBytecodeOffset();
  // Reload the bytecode offset from the interpreter frame.
  Node* ReloadBytecodeOffset();

  // Updates and returns BytecodeOffset() advanced by the current bytecode's
  // size. Traces the exit of the current bytecode.
  compiler::Node* Advance();

  // Updates and returns BytecodeOffset() advanced by delta bytecodes.
  // Traces the exit of the current bytecode.
  compiler::Node* Advance(int delta);
  compiler::Node* Advance(compiler::Node* delta, bool backward = false);

  // Load the bytecode at |bytecode_offset|.
  compiler::Node* LoadBytecode(compiler::Node* bytecode_offset);

  // Look ahead for Star and inline it in a branch. Returns a new target
  // bytecode node for dispatch.
  compiler::Node* StarDispatchLookahead(compiler::Node* target_bytecode);

  // Build code for Star at the current BytecodeOffset() and Advance() to the
  // next dispatch offset.
  void InlineStar();

  // Dispatch to the bytecode handler with code offset |handler|.
  compiler::Node* DispatchToBytecodeHandler(compiler::Node* handler,
                                            compiler::Node* bytecode_offset,
                                            compiler::Node* target_bytecode);

  // Dispatch to the bytecode handler with code entry point |handler_entry|.
  compiler::Node* DispatchToBytecodeHandlerEntry(
      compiler::Node* handler_entry, compiler::Node* bytecode_offset,
      compiler::Node* target_bytecode);

  int CurrentBytecodeSize() const;

  OperandScale operand_scale() const { return operand_scale_; }

  Bytecode bytecode_;
  OperandScale operand_scale_;
  CodeStubAssembler::Variable interpreted_frame_pointer_;
  CodeStubAssembler::Variable bytecode_array_;
  CodeStubAssembler::Variable bytecode_offset_;
  CodeStubAssembler::Variable dispatch_table_;
  CodeStubAssembler::Variable accumulator_;
  AccumulatorUse accumulator_use_;
  bool made_call_;
  bool reloaded_frame_ptr_;
  bool bytecode_array_valid_;
  bool disable_stack_check_across_call_;
  compiler::Node* stack_pointer_before_call_;

  DISALLOW_COPY_AND_ASSIGN(InterpreterAssembler);
};

}  // namespace interpreter
}  // namespace internal
}  // namespace v8

#endif  // V8_INTERPRETER_INTERPRETER_ASSEMBLER_H_