// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
4 
5 #ifndef V8_OPTIMIZED_COMPILATION_INFO_H_
6 #define V8_OPTIMIZED_COMPILATION_INFO_H_
7 
8 #include <memory>
9 
10 #include "src/bailout-reason.h"
11 #include "src/compilation-dependencies.h"
12 #include "src/feedback-vector.h"
13 #include "src/frames.h"
14 #include "src/globals.h"
15 #include "src/handles.h"
16 #include "src/objects.h"
17 #include "src/source-position-table.h"
18 #include "src/utils.h"
19 #include "src/vector.h"
20 
21 namespace v8 {
22 namespace internal {
23 
24 class CoverageInfo;
25 class DeclarationScope;
26 class DeferredHandles;
27 class FunctionLiteral;
28 class Isolate;
29 class JavaScriptFrame;
30 class ParseInfo;
31 class SourceRangeMap;
32 class Zone;
33 
34 // OptimizedCompilationInfo encapsulates the information needed to compile
35 // optimized code for a given function, and the results of the optimized
36 // compilation.
37 class V8_EXPORT_PRIVATE OptimizedCompilationInfo final {
38  public:
39   // Various configuration flags for a compilation, as well as some properties
40   // of the compiled code produced by a compilation.
41   enum Flag {
42     kAccessorInliningEnabled = 1 << 0,
43     kFunctionContextSpecializing = 1 << 1,
44     kInliningEnabled = 1 << 2,
45     kDisableFutureOptimization = 1 << 3,
46     kSplittingEnabled = 1 << 4,
47     kSourcePositionsEnabled = 1 << 5,
48     kBailoutOnUninitialized = 1 << 6,
49     kLoopPeelingEnabled = 1 << 7,
50     kUntrustedCodeMitigations = 1 << 8,
51     kSwitchJumpTableEnabled = 1 << 9,
52     kCalledWithCodeStartRegister = 1 << 10,
53     kPoisonRegisterArguments = 1 << 11,
54     kAllocationFoldingEnabled = 1 << 12,
55     kAnalyzeEnvironmentLiveness = 1 << 13,
56     kTraceTurboJson = 1 << 14,
57     kTraceTurboGraph = 1 << 15,
58     kTraceTurboScheduled = 1 << 16,
59   };
60 
61   // TODO(mtrofin): investigate if this might be generalized outside wasm, with
62   // the goal of better separating the compiler from where compilation lands. At
63   // that point, the Handle<Code> member of OptimizedCompilationInfo would also
64   // be removed.
65   struct WasmCodeDesc {
66     CodeDesc code_desc;
67     size_t safepoint_table_offset = 0;
68     size_t handler_table_offset = 0;
69     uint32_t frame_slot_count = 0;
70     Handle<ByteArray> source_positions_table;
71   };
72 
73   // Construct a compilation info for optimized compilation.
74   OptimizedCompilationInfo(Zone* zone, Isolate* isolate,
75                            Handle<SharedFunctionInfo> shared,
76                            Handle<JSFunction> closure);
77   // Construct a compilation info for stub compilation, Wasm, and testing.
78   OptimizedCompilationInfo(Vector<const char> debug_name, Zone* zone,
79                            Code::Kind code_kind);
80 
81   ~OptimizedCompilationInfo();
82 
zone()83   Zone* zone() { return zone_; }
is_osr()84   bool is_osr() const { return !osr_offset_.IsNone(); }
shared_info()85   Handle<SharedFunctionInfo> shared_info() const { return shared_info_; }
has_shared_info()86   bool has_shared_info() const { return !shared_info().is_null(); }
closure()87   Handle<JSFunction> closure() const { return closure_; }
code()88   Handle<Code> code() const { return code_; }
abstract_code_kind()89   AbstractCode::Kind abstract_code_kind() const { return code_kind_; }
code_kind()90   Code::Kind code_kind() const {
91     DCHECK(code_kind_ < static_cast<AbstractCode::Kind>(Code::NUMBER_OF_KINDS));
92     return static_cast<Code::Kind>(code_kind_);
93   }
stub_key()94   uint32_t stub_key() const { return stub_key_; }
set_stub_key(uint32_t stub_key)95   void set_stub_key(uint32_t stub_key) { stub_key_ = stub_key; }
builtin_index()96   int32_t builtin_index() const { return builtin_index_; }
set_builtin_index(int32_t index)97   void set_builtin_index(int32_t index) { builtin_index_ = index; }
osr_offset()98   BailoutId osr_offset() const { return osr_offset_; }
osr_frame()99   JavaScriptFrame* osr_frame() const { return osr_frame_; }
100 
101   // Flags used by optimized compilation.
102 
MarkAsFunctionContextSpecializing()103   void MarkAsFunctionContextSpecializing() {
104     SetFlag(kFunctionContextSpecializing);
105   }
is_function_context_specializing()106   bool is_function_context_specializing() const {
107     return GetFlag(kFunctionContextSpecializing);
108   }
109 
MarkAsAccessorInliningEnabled()110   void MarkAsAccessorInliningEnabled() { SetFlag(kAccessorInliningEnabled); }
is_accessor_inlining_enabled()111   bool is_accessor_inlining_enabled() const {
112     return GetFlag(kAccessorInliningEnabled);
113   }
114 
MarkAsSourcePositionsEnabled()115   void MarkAsSourcePositionsEnabled() { SetFlag(kSourcePositionsEnabled); }
is_source_positions_enabled()116   bool is_source_positions_enabled() const {
117     return GetFlag(kSourcePositionsEnabled);
118   }
119 
MarkAsInliningEnabled()120   void MarkAsInliningEnabled() { SetFlag(kInliningEnabled); }
is_inlining_enabled()121   bool is_inlining_enabled() const { return GetFlag(kInliningEnabled); }
122 
SetPoisoningMitigationLevel(PoisoningMitigationLevel poisoning_level)123   void SetPoisoningMitigationLevel(PoisoningMitigationLevel poisoning_level) {
124     poisoning_level_ = poisoning_level;
125   }
GetPoisoningMitigationLevel()126   PoisoningMitigationLevel GetPoisoningMitigationLevel() const {
127     return poisoning_level_;
128   }
129 
MarkAsSplittingEnabled()130   void MarkAsSplittingEnabled() { SetFlag(kSplittingEnabled); }
is_splitting_enabled()131   bool is_splitting_enabled() const { return GetFlag(kSplittingEnabled); }
132 
MarkAsBailoutOnUninitialized()133   void MarkAsBailoutOnUninitialized() { SetFlag(kBailoutOnUninitialized); }
is_bailout_on_uninitialized()134   bool is_bailout_on_uninitialized() const {
135     return GetFlag(kBailoutOnUninitialized);
136   }
137 
MarkAsLoopPeelingEnabled()138   void MarkAsLoopPeelingEnabled() { SetFlag(kLoopPeelingEnabled); }
is_loop_peeling_enabled()139   bool is_loop_peeling_enabled() const { return GetFlag(kLoopPeelingEnabled); }
140 
has_untrusted_code_mitigations()141   bool has_untrusted_code_mitigations() const {
142     return GetFlag(kUntrustedCodeMitigations);
143   }
144 
switch_jump_table_enabled()145   bool switch_jump_table_enabled() const {
146     return GetFlag(kSwitchJumpTableEnabled);
147   }
148 
called_with_code_start_register()149   bool called_with_code_start_register() const {
150     bool enabled = GetFlag(kCalledWithCodeStartRegister);
151     return enabled;
152   }
153 
MarkAsPoisoningRegisterArguments()154   void MarkAsPoisoningRegisterArguments() {
155     DCHECK(has_untrusted_code_mitigations());
156     SetFlag(kPoisonRegisterArguments);
157   }
is_poisoning_register_arguments()158   bool is_poisoning_register_arguments() const {
159     bool enabled = GetFlag(kPoisonRegisterArguments);
160     DCHECK_IMPLIES(enabled, has_untrusted_code_mitigations());
161     DCHECK_IMPLIES(enabled, called_with_code_start_register());
162     return enabled;
163   }
164 
MarkAsAllocationFoldingEnabled()165   void MarkAsAllocationFoldingEnabled() { SetFlag(kAllocationFoldingEnabled); }
is_allocation_folding_enabled()166   bool is_allocation_folding_enabled() const {
167     return GetFlag(kAllocationFoldingEnabled);
168   }
169 
MarkAsAnalyzeEnvironmentLiveness()170   void MarkAsAnalyzeEnvironmentLiveness() {
171     SetFlag(kAnalyzeEnvironmentLiveness);
172   }
is_analyze_environment_liveness()173   bool is_analyze_environment_liveness() const {
174     return GetFlag(kAnalyzeEnvironmentLiveness);
175   }
176 
trace_turbo_json_enabled()177   bool trace_turbo_json_enabled() const { return GetFlag(kTraceTurboJson); }
178 
trace_turbo_graph_enabled()179   bool trace_turbo_graph_enabled() const { return GetFlag(kTraceTurboGraph); }
180 
trace_turbo_scheduled_enabled()181   bool trace_turbo_scheduled_enabled() const {
182     return GetFlag(kTraceTurboScheduled);
183   }
184 
185   // Code getters and setters.
186 
SetCode(Handle<Code> code)187   void SetCode(Handle<Code> code) { code_ = code; }
188 
189   bool has_context() const;
190   Context* context() const;
191 
192   bool has_native_context() const;
193   Context* native_context() const;
194 
195   bool has_global_object() const;
196   JSGlobalObject* global_object() const;
197 
198   // Accessors for the different compilation modes.
IsOptimizing()199   bool IsOptimizing() const {
200     return abstract_code_kind() == AbstractCode::OPTIMIZED_FUNCTION;
201   }
IsWasm()202   bool IsWasm() const {
203     return abstract_code_kind() == AbstractCode::WASM_FUNCTION;
204   }
IsStub()205   bool IsStub() const {
206     return abstract_code_kind() != AbstractCode::OPTIMIZED_FUNCTION &&
207            abstract_code_kind() != AbstractCode::WASM_FUNCTION;
208   }
SetOptimizingForOsr(BailoutId osr_offset,JavaScriptFrame * osr_frame)209   void SetOptimizingForOsr(BailoutId osr_offset, JavaScriptFrame* osr_frame) {
210     DCHECK(IsOptimizing());
211     osr_offset_ = osr_offset;
212     osr_frame_ = osr_frame;
213   }
214 
215   void set_deferred_handles(std::shared_ptr<DeferredHandles> deferred_handles);
216   void set_deferred_handles(DeferredHandles* deferred_handles);
deferred_handles()217   std::shared_ptr<DeferredHandles> deferred_handles() {
218     return deferred_handles_;
219   }
220 
221   void ReopenHandlesInNewHandleScope();
222 
AbortOptimization(BailoutReason reason)223   void AbortOptimization(BailoutReason reason) {
224     DCHECK_NE(reason, BailoutReason::kNoReason);
225     if (bailout_reason_ == BailoutReason::kNoReason) bailout_reason_ = reason;
226     SetFlag(kDisableFutureOptimization);
227   }
228 
RetryOptimization(BailoutReason reason)229   void RetryOptimization(BailoutReason reason) {
230     DCHECK_NE(reason, BailoutReason::kNoReason);
231     if (GetFlag(kDisableFutureOptimization)) return;
232     bailout_reason_ = reason;
233   }
234 
bailout_reason()235   BailoutReason bailout_reason() const { return bailout_reason_; }
236 
dependencies()237   CompilationDependencies* dependencies() { return dependencies_.get(); }
238 
optimization_id()239   int optimization_id() const {
240     DCHECK(IsOptimizing());
241     return optimization_id_;
242   }
243 
244   struct InlinedFunctionHolder {
245     Handle<SharedFunctionInfo> shared_info;
246 
247     InliningPosition position;
248 
InlinedFunctionHolderInlinedFunctionHolder249     InlinedFunctionHolder(Handle<SharedFunctionInfo> inlined_shared_info,
250                           SourcePosition pos)
251         : shared_info(inlined_shared_info) {
252       position.position = pos;
253       // initialized when generating the deoptimization literals
254       position.inlined_function_id = DeoptimizationData::kNotInlinedIndex;
255     }
256 
RegisterInlinedFunctionIdInlinedFunctionHolder257     void RegisterInlinedFunctionId(size_t inlined_function_id) {
258       position.inlined_function_id = static_cast<int>(inlined_function_id);
259     }
260   };
261 
262   typedef std::vector<InlinedFunctionHolder> InlinedFunctionList;
inlined_functions()263   InlinedFunctionList& inlined_functions() { return inlined_functions_; }
264 
265   // Returns the inlining id for source position tracking.
266   int AddInlinedFunction(Handle<SharedFunctionInfo> inlined_function,
267                          SourcePosition pos);
268 
269   std::unique_ptr<char[]> GetDebugName() const;
270 
271   StackFrame::Type GetOutputStackFrameType() const;
272 
wasm_code_desc()273   WasmCodeDesc* wasm_code_desc() { return &wasm_code_desc_; }
274 
275  private:
276   OptimizedCompilationInfo(Vector<const char> debug_name,
277                            AbstractCode::Kind code_kind, Zone* zone);
278 
SetFlag(Flag flag)279   void SetFlag(Flag flag) { flags_ |= flag; }
GetFlag(Flag flag)280   bool GetFlag(Flag flag) const { return (flags_ & flag) != 0; }
281 
282   void SetTracingFlags(bool passes_filter);
283 
284   // Compilation flags.
285   unsigned flags_;
286   PoisoningMitigationLevel poisoning_level_ =
287       PoisoningMitigationLevel::kDontPoison;
288 
289   AbstractCode::Kind code_kind_;
290   uint32_t stub_key_;
291   int32_t builtin_index_;
292 
293   Handle<SharedFunctionInfo> shared_info_;
294 
295   Handle<JSFunction> closure_;
296 
297   // The compiled code.
298   Handle<Code> code_;
299   WasmCodeDesc wasm_code_desc_;
300 
301   // Entry point when compiling for OSR, {BailoutId::None} otherwise.
302   BailoutId osr_offset_;
303 
304   // The zone from which the compilation pipeline working on this
305   // OptimizedCompilationInfo allocates.
306   Zone* zone_;
307 
308   std::shared_ptr<DeferredHandles> deferred_handles_;
309 
310   // Dependencies for this compilation, e.g. stable maps.
311   std::unique_ptr<CompilationDependencies> dependencies_;
312 
313   BailoutReason bailout_reason_;
314 
315   InlinedFunctionList inlined_functions_;
316 
317   int optimization_id_;
318 
319   // The current OSR frame for specialization or {nullptr}.
320   JavaScriptFrame* osr_frame_ = nullptr;
321 
322   Vector<const char> debug_name_;
323 
324   DISALLOW_COPY_AND_ASSIGN(OptimizedCompilationInfo);
325 };
326 
327 }  // namespace internal
328 }  // namespace v8
329 
330 #endif  // V8_OPTIMIZED_COMPILATION_INFO_H_
331