// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/asmjs/asm-js.h"
#include "src/codegen/compilation-cache.h"
#include "src/codegen/compiler.h"
#include "src/common/assert-scope.h"
#include "src/common/message-template.h"
#include "src/compiler-dispatcher/optimizing-compile-dispatcher.h"
#include "src/deoptimizer/deoptimizer.h"
#include "src/execution/arguments-inl.h"
#include "src/execution/frames-inl.h"
#include "src/execution/isolate-inl.h"
#include "src/execution/v8threads.h"
#include "src/execution/vm-state-inl.h"
#include "src/objects/js-array-buffer-inl.h"
#include "src/objects/js-array-inl.h"
#include "src/runtime/runtime-utils.h"

namespace v8 {
namespace internal {

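// Compiles the given function if it is not yet compiled, reusing code from
// the NCI compilation cache where possible, and returns the code object to
// continue execution with.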
RUNTIME_FUNCTION(Runtime_CompileLazy) {
  HandleScope scope(isolate);
  DCHECK_EQ(1, args.length());
  CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);

  Handle<SharedFunctionInfo> sfi(function->shared(), isolate);

#ifdef DEBUG
  if (FLAG_trace_lazy && !sfi->is_compiled()) {
    PrintF("[unoptimized: ");
    function->PrintName();
    PrintF("]\n");
  }
#endif

  StackLimitCheck check(isolate);
  if (check.JsHasOverflowed(kStackSpaceRequiredForCompilation * KB)) {
    return isolate->StackOverflow();
  }
  IsCompiledScope is_compiled_scope;
  if (!Compiler::Compile(function, Compiler::KEEP_EXCEPTION,
                         &is_compiled_scope)) {
    return ReadOnlyRoots(isolate).exception();
  }
  if (sfi->may_have_cached_code()) {
    Handle<Code> code;
    if (sfi->TryGetCachedCode(isolate).ToHandle(&code)) {
      function->set_code(*code);
      JSFunction::EnsureFeedbackVector(function, &is_compiled_scope);
      if (FLAG_trace_turbo_nci) CompilationCacheCode::TraceHit(sfi, code);
      return *code;
    }
  }
  DCHECK(function->is_compiled());
  return function->code();
}

namespace {

// Returns false iff an exception was thrown.
bool MaybeSpawnNativeContextIndependentCompilationJob(
    Handle<JSFunction> function, ConcurrencyMode mode) {
  if (!FLAG_turbo_nci || FLAG_turbo_nci_as_midtier) {
    return true;  // Nothing to do.
  }

  // If delayed codegen is enabled, the first optimization request does not
  // trigger NCI compilation, since we try to avoid compiling Code that
  // remains unused in the future. Repeated optimization (possibly in
  // different native contexts) is taken as a signal that this SFI will
  // continue to be used in the future, thus we trigger NCI compilation.
  if (!FLAG_turbo_nci_delayed_codegen ||
      function->shared().has_optimized_at_least_once()) {
    if (!Compiler::CompileOptimized(function, mode,
                                    CodeKind::NATIVE_CONTEXT_INDEPENDENT)) {
      return false;
    }
  } else {
    function->shared().set_has_optimized_at_least_once(true);
  }

  return true;
}

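// Shared implementation of the Runtime_CompileOptimized_* entry points
// below: compiles the function for its next tier and possibly spawns an
// additional NCI compilation job for caching.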
Object CompileOptimized(Isolate* isolate, Handle<JSFunction> function,
                        ConcurrencyMode mode) {
  StackLimitCheck check(isolate);
  if (check.JsHasOverflowed(kStackSpaceRequiredForCompilation * KB)) {
    return isolate->StackOverflow();
  }

  // Compile for the next tier.
  if (!Compiler::CompileOptimized(function, mode, function->NextTier())) {
    return ReadOnlyRoots(isolate).exception();
  }

  // Possibly compile for NCI caching.
  if (!MaybeSpawnNativeContextIndependentCompilationJob(function, mode)) {
    return ReadOnlyRoots(isolate).exception();
  }

  DCHECK(function->is_compiled());
  return function->code();
}

}  // namespace

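// Entry points for tier-up compilation in concurrent and non-concurrent
// mode, respectively.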
RUNTIME_FUNCTION(Runtime_CompileOptimized_Concurrent) {
  HandleScope scope(isolate);
  DCHECK_EQ(1, args.length());
  CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
  return CompileOptimized(isolate, function, ConcurrencyMode::kConcurrent);
}

RUNTIME_FUNCTION(Runtime_CompileOptimized_NotConcurrent) {
  HandleScope scope(isolate);
  DCHECK_EQ(1, args.length());
  CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
  return CompileOptimized(isolate, function, ConcurrencyMode::kNotConcurrent);
}

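// Logs a function's first execution (only used with --log-function-events),
// clears the kLogFirstExecution marker, and returns the function's code to
// resume execution.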
RUNTIME_FUNCTION(Runtime_FunctionFirstExecution) {
  HandleScope scope(isolate);
  StackLimitCheck check(isolate);
  DCHECK_EQ(1, args.length());

  CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
  DCHECK_EQ(function->feedback_vector().optimization_marker(),
            OptimizationMarker::kLogFirstExecution);
  DCHECK(FLAG_log_function_events);
  Handle<SharedFunctionInfo> sfi(function->shared(), isolate);
  LOG(isolate, FunctionEvent(
                   "first-execution", Script::cast(sfi->script()).id(), 0,
                   sfi->StartPosition(), sfi->EndPosition(), sfi->DebugName()));
  function->feedback_vector().ClearOptimizationMarker();
  // Return the code to continue execution; we don't care at this point
  // whether this is for lazy compilation or has been eagerly compiled.
  return function->code();
}

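// Evicts any optimized code that has been marked for deoptimization from the
// feedback vector's optimized code slot, then returns the code currently
// installed on the function.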
RUNTIME_FUNCTION(Runtime_HealOptimizedCodeSlot) {
  SealHandleScope scope(isolate);
  DCHECK_EQ(1, args.length());
  CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);

  DCHECK(function->shared().is_compiled());

  function->feedback_vector().EvictOptimizedCodeMarkedForDeoptimization(
      function->shared(), "Runtime_HealOptimizedCodeSlot");
  return function->code();
}

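// Attempts to instantiate an asm.js module as WebAssembly, given the stdlib,
// foreign and memory arguments of the asm.js link step. On failure the
// shared function info is marked asm-wasm-broken and the function falls back
// to plain lazy compilation; Smi zero signals the failure to the caller.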
RUNTIME_FUNCTION(Runtime_InstantiateAsmJs) {
  HandleScope scope(isolate);
  DCHECK_EQ(args.length(), 4);
  CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);

  Handle<JSReceiver> stdlib;
  if (args[1].IsJSReceiver()) {
    stdlib = args.at<JSReceiver>(1);
  }
  Handle<JSReceiver> foreign;
  if (args[2].IsJSReceiver()) {
    foreign = args.at<JSReceiver>(2);
  }
  Handle<JSArrayBuffer> memory;
  if (args[3].IsJSArrayBuffer()) {
    memory = args.at<JSArrayBuffer>(3);
  }
  Handle<SharedFunctionInfo> shared(function->shared(), isolate);
  if (shared->HasAsmWasmData()) {
    Handle<AsmWasmData> data(shared->asm_wasm_data(), isolate);
    MaybeHandle<Object> result = AsmJs::InstantiateAsmWasm(
        isolate, shared, data, stdlib, foreign, memory);
    if (!result.is_null()) return *result.ToHandleChecked();
    // Remove wasm data, mark as broken for asm->wasm, replace function code
    // with UncompiledData, and return a smi 0 to indicate failure.
    SharedFunctionInfo::DiscardCompiled(isolate, shared);
  }
  shared->set_is_asm_wasm_broken(true);
  DCHECK(function->code() ==
         isolate->builtins()->builtin(Builtins::kInstantiateAsmJs));
  function->set_code(isolate->builtins()->builtin(Builtins::kCompileLazy));
  DCHECK(!isolate->has_pending_exception());
  return Smi::zero();
}

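// Called upon deoptimization: materializes heap objects captured by the
// deoptimizer, restores the context from the topmost frame, and invalidates
// the optimized code on eager and soft deopts (unless it should be reused).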
RUNTIME_FUNCTION(Runtime_NotifyDeoptimized) {
  HandleScope scope(isolate);
  DCHECK_EQ(0, args.length());
  Deoptimizer* deoptimizer = Deoptimizer::Grab(isolate);
  DCHECK(CodeKindCanDeoptimize(deoptimizer->compiled_code()->kind()));
  DCHECK(deoptimizer->compiled_code()->is_turbofanned());
  DCHECK(AllowHeapAllocation::IsAllowed());
  DCHECK(AllowGarbageCollection::IsAllowed());
  DCHECK(isolate->context().is_null());

  TimerEventScope<TimerEventDeoptimizeCode> timer(isolate);
  TRACE_EVENT0("v8", "V8.DeoptimizeCode");
  Handle<JSFunction> function = deoptimizer->function();
  // For OSR the optimized code isn't installed on the function, so get the
  // code object from the deoptimizer.
  Handle<Code> optimized_code = deoptimizer->compiled_code();
  DeoptimizeKind type = deoptimizer->deopt_kind();
  bool should_reuse_code = deoptimizer->should_reuse_code();

  // TODO(turbofan): We currently need the native context to materialize
  // the arguments object, but only to get to its map.
  isolate->set_context(deoptimizer->function()->native_context());

  // Make sure to materialize objects before causing any allocation.
  deoptimizer->MaterializeHeapObjects();
  delete deoptimizer;

  // Ensure the context register is updated for materialized objects.
  JavaScriptFrameIterator top_it(isolate);
  JavaScriptFrame* top_frame = top_it.frame();
  isolate->set_context(Context::cast(top_frame->context()));

  if (should_reuse_code) {
    optimized_code->increment_deoptimization_count();
    return ReadOnlyRoots(isolate).undefined_value();
  }

  // Invalidate the underlying optimized code on eager and soft deopts.
  if (type == DeoptimizeKind::kEager || type == DeoptimizeKind::kSoft) {
    Deoptimizer::DeoptimizeFunction(*function, *optimized_code);
  }

  return ReadOnlyRoots(isolate).undefined_value();
}


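// Returns whether OSR may be attempted for |function|: optimization must not
// be disabled, a feedback vector must exist, and there must be no optimized
// activations of the function on the stack.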
static bool IsSuitableForOnStackReplacement(Isolate* isolate,
                                            Handle<JSFunction> function) {
  // Don't OSR if optimization has been disabled for this function.
  if (function->shared().optimization_disabled()) return false;
  // TODO(chromium:1031479): Currently, the OSR triggering mechanism is tied
  // to the bytecode array, so it might be possible to mark a closure in one
  // native context and optimize a closure from a different native context.
  // Hence, check that there is a feedback vector before OSRing. We don't
  // expect this to happen often.
  if (!function->has_feedback_vector()) return false;
  // If we are trying to do OSR when there are already optimized
  // activations of the function, it means (a) the function is directly or
  // indirectly recursive and (b) an optimized invocation has been
  // deoptimized so that we are currently in an unoptimized activation.
  // Check for optimized activations of this function.
  for (JavaScriptFrameIterator it(isolate); !it.done(); it.Advance()) {
    JavaScriptFrame* frame = it.frame();
    if (frame->is_optimized() && frame->function() == *function) return false;
  }

  return true;
}

namespace {

BailoutId DetermineEntryAndDisarmOSRForInterpreter(JavaScriptFrame* frame) {
  InterpretedFrame* iframe = reinterpret_cast<InterpretedFrame*>(frame);

  // Note that the bytecode array active on the stack might be different from
  // the one installed on the function (e.g. patched by the debugger). This
  // however is fine because we guarantee the layout to be in sync, hence any
  // BailoutId representing the entry point will be valid for any copy of the
  // bytecode.
  Handle<BytecodeArray> bytecode(iframe->GetBytecodeArray(), iframe->isolate());

  DCHECK(frame->LookupCode().is_interpreter_trampoline_builtin());
  DCHECK(frame->function().shared().HasBytecodeArray());
  DCHECK(frame->is_interpreted());

  // Reset the OSR loop nesting depth to disarm back edges.
  bytecode->set_osr_loop_nesting_level(0);

  // Return a BailoutId representing the bytecode offset of the back branch.
  return BailoutId(iframe->GetBytecodeOffset());
}

}  // namespace

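// Compiles optimized code for on-stack replacement, triggered by an armed
// back edge in an interpreted frame. Returns the optimized code object on
// success and an empty Object() otherwise.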
RUNTIME_FUNCTION(Runtime_CompileForOnStackReplacement) {
  HandleScope scope(isolate);
  DCHECK_EQ(0, args.length());

  // Only reachable when OSR is enabled.
  CHECK(FLAG_use_osr);

  // Determine the frame triggering the OSR request.
  JavaScriptFrameIterator it(isolate);
  JavaScriptFrame* frame = it.frame();
  DCHECK(frame->is_interpreted());

  // Determine the entry point for which this OSR request has been fired and
  // also disarm all back edges in the calling code to stop new requests.
  BailoutId ast_id = DetermineEntryAndDisarmOSRForInterpreter(frame);
  DCHECK(!ast_id.IsNone());

  MaybeHandle<Code> maybe_result;
  Handle<JSFunction> function(frame->function(), isolate);
  if (IsSuitableForOnStackReplacement(isolate, function)) {
    if (FLAG_trace_osr) {
      CodeTracer::Scope scope(isolate->GetCodeTracer());
      PrintF(scope.file(), "[OSR - Compiling: ");
      function->PrintName(scope.file());
      PrintF(scope.file(), " at AST id %d]\n", ast_id.ToInt());
    }
    maybe_result = Compiler::GetOptimizedCodeForOSR(function, ast_id, frame);

    // Possibly compile for NCI caching.
    if (!MaybeSpawnNativeContextIndependentCompilationJob(
            function, FLAG_concurrent_recompilation
                          ? ConcurrencyMode::kConcurrent
                          : ConcurrencyMode::kNotConcurrent)) {
      return Object();
    }
  }

  // Check whether we ended up with usable optimized code.
  Handle<Code> result;
  if (maybe_result.ToHandle(&result) &&
      CodeKindIsOptimizedJSFunction(result->kind())) {
    DeoptimizationData data =
        DeoptimizationData::cast(result->deoptimization_data());

    if (data.OsrPcOffset().value() >= 0) {
      DCHECK(BailoutId(data.OsrBytecodeOffset().value()) == ast_id);
      if (FLAG_trace_osr) {
        CodeTracer::Scope scope(isolate->GetCodeTracer());
        PrintF(scope.file(),
               "[OSR - Entry at AST id %d, offset %d in optimized code]\n",
               ast_id.ToInt(), data.OsrPcOffset().value());
      }

      DCHECK(result->is_turbofanned());
      if (function->feedback_vector().invocation_count() <= 1 &&
          function->HasOptimizationMarker()) {
        // With lazy feedback allocation we may not have feedback for the
        // initial part of the function that was executed before we allocated a
        // feedback vector. Reset any optimization markers for such functions.
        //
        // TODO(mythria): Instead of resetting the optimization marker here we
        // should only mark a function for optimization if it has sufficient
        // feedback. We cannot do this currently since we OSR only after we
        // mark a function for optimization. We should instead change it to be
        // based on the number of ticks.
        DCHECK(!function->IsInOptimizationQueue());
        function->ClearOptimizationMarker();
      }
      // TODO(mythria): Once we have an OSR code cache we may not need to mark
      // the function for non-concurrent compilation. We could arm the loops
      // early so the second execution uses the already compiled OSR code and
      // the optimization occurs concurrently off the main thread.
      if (!function->HasAvailableOptimizedCode() &&
          function->feedback_vector().invocation_count() > 1) {
        // If we're not already optimized, set to optimize non-concurrently on
        // the next call, otherwise we'd run unoptimized once more and
        // potentially compile for OSR again.
        if (FLAG_trace_osr) {
          CodeTracer::Scope scope(isolate->GetCodeTracer());
          PrintF(scope.file(), "[OSR - Re-marking ");
          function->PrintName(scope.file());
          PrintF(scope.file(), " for non-concurrent optimization]\n");
        }
        function->SetOptimizationMarker(OptimizationMarker::kCompileOptimized);
      }
      return *result;
    }
  }

  // Failed.
  if (FLAG_trace_osr) {
    CodeTracer::Scope scope(isolate->GetCodeTracer());
    PrintF(scope.file(), "[OSR - Failed: ");
    function->PrintName(scope.file());
    PrintF(scope.file(), " at AST id %d]\n", ast_id.ToInt());
  }

  if (!function->HasAttachedOptimizedCode()) {
    function->set_code(function->shared().GetCode());
  }
  return Object();
}

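// Compiles the source of a direct eval call in the current context, after
// checking that the native context allows code generation from strings.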
static Object CompileGlobalEval(Isolate* isolate,
                                Handle<i::Object> source_object,
                                Handle<SharedFunctionInfo> outer_info,
                                LanguageMode language_mode,
                                int eval_scope_position, int eval_position) {
  Handle<Context> context(isolate->context(), isolate);
  Handle<Context> native_context(context->native_context(), isolate);

  // Check if the native context allows code generation from strings. Throw
  // an exception if it doesn't.
  MaybeHandle<String> source;
  bool unknown_object;
  std::tie(source, unknown_object) = Compiler::ValidateDynamicCompilationSource(
      isolate, native_context, source_object);
  // If the argument is an unhandled object type, bounce to GlobalEval.
  if (unknown_object) {
    return native_context->global_eval_fun();
  }
  if (source.is_null()) {
    Handle<Object> error_message =
        native_context->ErrorMessageForCodeGenerationFromStrings();
    Handle<Object> error;
    MaybeHandle<Object> maybe_error = isolate->factory()->NewEvalError(
        MessageTemplate::kCodeGenFromStrings, error_message);
    if (maybe_error.ToHandle(&error)) isolate->Throw(*error);
    return ReadOnlyRoots(isolate).exception();
  }

  // Deal with a normal eval call with a string argument. Compile it
  // and return the compiled function bound in the local context.
  static const ParseRestriction restriction = NO_PARSE_RESTRICTION;
  Handle<JSFunction> compiled;
  ASSIGN_RETURN_ON_EXCEPTION_VALUE(
      isolate, compiled,
      Compiler::GetFunctionFromEval(
          source.ToHandleChecked(), outer_info, context, language_mode,
          restriction, kNoSourcePosition, eval_scope_position, eval_position),
      ReadOnlyRoots(isolate).exception());
  return *compiled;
}

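// Resolves a possibly-direct call to eval: anything other than the original
// GlobalEval function is returned unchanged (and thus called as a normal
// function); otherwise the source is compiled in the calling context.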
RUNTIME_FUNCTION(Runtime_ResolvePossiblyDirectEval) {
  HandleScope scope(isolate);
  DCHECK_EQ(6, args.length());

  Handle<Object> callee = args.at(0);

  // If "eval" didn't refer to the original GlobalEval, it's not a
  // direct call to eval.
  if (*callee != isolate->native_context()->global_eval_fun()) {
    return *callee;
  }

  DCHECK(args[3].IsSmi());
  DCHECK(is_valid_language_mode(args.smi_at(3)));
  LanguageMode language_mode = static_cast<LanguageMode>(args.smi_at(3));
  DCHECK(args[4].IsSmi());
  Handle<SharedFunctionInfo> outer_info(args.at<JSFunction>(2)->shared(),
                                        isolate);
  return CompileGlobalEval(isolate, args.at<Object>(1), outer_info,
                           language_mode, args.smi_at(4), args.smi_at(5));
}

}  // namespace internal
}  // namespace v8