// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/compiler/js-inlining.h"

#include "src/ast/ast.h"
#include "src/codegen/compiler.h"
#include "src/codegen/optimized-compilation-info.h"
#include "src/codegen/tick-counter.h"
#include "src/compiler/access-builder.h"
#include "src/compiler/all-nodes.h"
#include "src/compiler/bytecode-graph-builder.h"
#include "src/compiler/common-operator.h"
#include "src/compiler/compiler-source-position-table.h"
#include "src/compiler/graph-reducer.h"
#include "src/compiler/js-heap-broker.h"
#include "src/compiler/js-operator.h"
#include "src/compiler/node-matchers.h"
#include "src/compiler/node-properties.h"
#include "src/compiler/operator-properties.h"
#include "src/compiler/simplified-operator.h"
#include "src/execution/isolate-inl.h"
#include "src/objects/feedback-cell-inl.h"
#include "src/parsing/parse-info.h"

namespace v8 {
namespace internal {
namespace compiler {

namespace {
// This is just to avoid some corner cases, especially since we allow recursive
// inlining.
static const int kMaxDepthForInlining = 50;
}  // namespace

#define TRACE(x)                     \
  do {                               \
    if (FLAG_trace_turbo_inlining) { \
      StdoutStream() << x << "\n";   \
    }                                \
  } while (false)

// Provides convenience accessors for the common layout of nodes having either
// the {JSCall} or the {JSConstruct} operator.
class JSCallAccessor {
 public:
  explicit JSCallAccessor(Node* call) : call_(call) {
    DCHECK(call->opcode() == IrOpcode::kJSCall ||
           call->opcode() == IrOpcode::kJSConstruct);
  }

  Node* target() const {
    return call_->InputAt(JSCallOrConstructNode::TargetIndex());
  }

  Node* receiver() const { return JSCallNode{call_}.receiver(); }

  Node* new_target() const { return JSConstructNode{call_}.new_target(); }

  Node* frame_state() const {
    return NodeProperties::GetFrameStateInput(call_);
  }

  int argument_count() const {
    return (call_->opcode() == IrOpcode::kJSCall)
               ? JSCallNode{call_}.ArgumentCount()
               : JSConstructNode{call_}.ArgumentCount();
  }

  CallFrequency const& frequency() const {
    return (call_->opcode() == IrOpcode::kJSCall)
               ? JSCallNode{call_}.Parameters().frequency()
               : JSConstructNode{call_}.Parameters().frequency();
  }

 private:
  Node* call_;
};

Reduction JSInliner::InlineCall(Node* call, Node* new_target, Node* context,
                                Node* frame_state, Node* start, Node* end,
                                Node* exception_target,
                                const NodeVector& uncaught_subcalls) {
  JSCallAccessor c(call);

  // The scheduler is smart enough to place our code; we just ensure {control}
  // becomes the control input of the start of the inlinee, and {effect}
  // becomes the effect input of the start of the inlinee.
  Node* control = NodeProperties::GetControlInput(call);
  Node* effect = NodeProperties::GetEffectInput(call);

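  // The value outputs of the inlinee's start node end with the new target,
  // the argument count and the function context, hence the -3, -2 and -1
  // offsets below; the earlier outputs correspond to the target,
  // receiver/new_target and formal parameters rewired in the loop that
  // follows.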
  int const inlinee_new_target_index =
      static_cast<int>(start->op()->ValueOutputCount()) - 3;
  int const inlinee_arity_index =
      static_cast<int>(start->op()->ValueOutputCount()) - 2;
  int const inlinee_context_index =
      static_cast<int>(start->op()->ValueOutputCount()) - 1;

  // {inliner_inputs} counts the target, receiver/new_target, and arguments;
  // but not feedback vector, context, effect or control.
  const int inliner_inputs = c.argument_count() +
                             JSCallOrConstructNode::kExtraInputCount -
                             JSCallOrConstructNode::kFeedbackVectorInputCount;
  // Iterate over all uses of the start node.
  for (Edge edge : start->use_edges()) {
    Node* use = edge.from();
    switch (use->opcode()) {
      case IrOpcode::kParameter: {
        int index = 1 + ParameterIndexOf(use->op());
        DCHECK_LE(index, inlinee_context_index);
        if (index < inliner_inputs && index < inlinee_new_target_index) {
          // There is an input from the call, and the index is a value
          // projection but not the context, so rewire the input.
          Replace(use, call->InputAt(index));
        } else if (index == inlinee_new_target_index) {
          // The projection is requesting the new target value.
          Replace(use, new_target);
        } else if (index == inlinee_arity_index) {
          // The projection is requesting the number of arguments.
          Replace(use, jsgraph()->Constant(c.argument_count()));
        } else if (index == inlinee_context_index) {
          // The projection is requesting the inlinee function context.
          Replace(use, context);
        } else {
          // Call has fewer arguments than required, fill with undefined.
          Replace(use, jsgraph()->UndefinedConstant());
        }
        break;
      }
      default:
        if (NodeProperties::IsEffectEdge(edge)) {
          edge.UpdateTo(effect);
        } else if (NodeProperties::IsControlEdge(edge)) {
          edge.UpdateTo(control);
        } else if (NodeProperties::IsFrameStateEdge(edge)) {
          edge.UpdateTo(frame_state);
        } else {
          UNREACHABLE();
        }
        break;
    }
  }

  if (exception_target != nullptr) {
    // Link uncaught calls in the inlinee to {exception_target}
    int subcall_count = static_cast<int>(uncaught_subcalls.size());
    if (subcall_count > 0) {
      TRACE("Inlinee contains " << subcall_count
                                << " calls without local exception handler; "
                                << "linking to surrounding exception handler.");
    }
    NodeVector on_exception_nodes(local_zone_);
    for (Node* subcall : uncaught_subcalls) {
      Node* on_success = graph()->NewNode(common()->IfSuccess(), subcall);
      NodeProperties::ReplaceUses(subcall, subcall, subcall, on_success);
      NodeProperties::ReplaceControlInput(on_success, subcall);
      Node* on_exception =
          graph()->NewNode(common()->IfException(), subcall, subcall);
      on_exception_nodes.push_back(on_exception);
    }

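    // Combine the exceptional continuations of all uncaught subcalls into a
    // single Merge/Phi/EffectPhi and use it to replace the {IfException}
    // projection of the call being inlined.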
    DCHECK_EQ(subcall_count, static_cast<int>(on_exception_nodes.size()));
    if (subcall_count > 0) {
      Node* control_output =
          graph()->NewNode(common()->Merge(subcall_count), subcall_count,
                           &on_exception_nodes.front());
      NodeVector values_effects(local_zone_);
      values_effects = on_exception_nodes;
      values_effects.push_back(control_output);
      Node* value_output = graph()->NewNode(
          common()->Phi(MachineRepresentation::kTagged, subcall_count),
          subcall_count + 1, &values_effects.front());
      Node* effect_output =
          graph()->NewNode(common()->EffectPhi(subcall_count),
                           subcall_count + 1, &values_effects.front());
      ReplaceWithValue(exception_target, value_output, effect_output,
                       control_output);
    } else {
      ReplaceWithValue(exception_target, exception_target, exception_target,
                       jsgraph()->Dead());
    }
  }

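  // Collect the value, effect and control projections of every {Return} in
  // the inlinee; other terminators (deoptimize, terminate, throw) are merged
  // directly into the end node of the outer graph.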
  NodeVector values(local_zone_);
  NodeVector effects(local_zone_);
  NodeVector controls(local_zone_);
  for (Node* const input : end->inputs()) {
    switch (input->opcode()) {
      case IrOpcode::kReturn:
        values.push_back(NodeProperties::GetValueInput(input, 1));
        effects.push_back(NodeProperties::GetEffectInput(input));
        controls.push_back(NodeProperties::GetControlInput(input));
        break;
      case IrOpcode::kDeoptimize:
      case IrOpcode::kTerminate:
      case IrOpcode::kThrow:
        NodeProperties::MergeControlToEnd(graph(), common(), input);
        Revisit(graph()->end());
        break;
      default:
        UNREACHABLE();
        break;
    }
  }
  DCHECK_EQ(values.size(), effects.size());
  DCHECK_EQ(values.size(), controls.size());

  // Depending on whether the inlinee produces a value, we either replace value
  // uses with said value or kill value uses if no value can be returned.
  if (values.size() > 0) {
    int const input_count = static_cast<int>(controls.size());
    Node* control_output = graph()->NewNode(common()->Merge(input_count),
                                            input_count, &controls.front());
    values.push_back(control_output);
    effects.push_back(control_output);
    Node* value_output = graph()->NewNode(
        common()->Phi(MachineRepresentation::kTagged, input_count),
        static_cast<int>(values.size()), &values.front());
    Node* effect_output =
        graph()->NewNode(common()->EffectPhi(input_count),
                         static_cast<int>(effects.size()), &effects.front());
    ReplaceWithValue(call, value_output, effect_output, control_output);
    return Changed(value_output);
  } else {
    ReplaceWithValue(call, jsgraph()->Dead(), jsgraph()->Dead(),
                     jsgraph()->Dead());
    return Changed(call);
  }
}

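// Creates a synthetic frame state (e.g. for a construct stub or an arguments
// adaptor frame) that is spliced into the inlined call's frame state chain so
// that the elided frame can be reconstructed when deoptimizing inside the
// inlinee.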
Node* JSInliner::CreateArtificialFrameState(Node* node, Node* outer_frame_state,
                                            int parameter_count,
                                            BailoutId bailout_id,
                                            FrameStateType frame_state_type,
                                            SharedFunctionInfoRef shared,
                                            Node* context) {
  const int parameter_count_with_receiver =
      parameter_count + JSCallOrConstructNode::kReceiverOrNewTargetInputCount;
  const FrameStateFunctionInfo* state_info =
      common()->CreateFrameStateFunctionInfo(
          frame_state_type, parameter_count_with_receiver, 0, shared.object());

  const Operator* op = common()->FrameState(
      bailout_id, OutputFrameStateCombine::Ignore(), state_info);
  const Operator* op0 = common()->StateValues(0, SparseInputMask::Dense());
  Node* node0 = graph()->NewNode(op0);

  NodeVector params(local_zone_);
  params.push_back(
      node->InputAt(JSCallOrConstructNode::ReceiverOrNewTargetIndex()));
  for (int i = 0; i < parameter_count; i++) {
    params.push_back(node->InputAt(JSCallOrConstructNode::ArgumentIndex(i)));
  }
  const Operator* op_param = common()->StateValues(
      static_cast<int>(params.size()), SparseInputMask::Dense());
  Node* params_node = graph()->NewNode(
      op_param, static_cast<int>(params.size()), &params.front());
  if (context == nullptr) context = jsgraph()->UndefinedConstant();
  return graph()->NewNode(op, params_node, node0, node0, context,
                          node->InputAt(JSCallOrConstructNode::TargetIndex()),
                          outer_frame_state);
}

namespace {

bool NeedsImplicitReceiver(SharedFunctionInfoRef shared_info) {
  DisallowHeapAllocation no_gc;
  return !shared_info.construct_as_builtin() &&
         !IsDerivedConstructor(shared_info.kind());
}

}  // namespace

// Determines whether the call target of the given call {node} is statically
// known and can be used as an inlining candidate. The {SharedFunctionInfo} of
// the call target is provided (the exact closure might be unknown).
base::Optional<SharedFunctionInfoRef> JSInliner::DetermineCallTarget(
    Node* node) {
  DCHECK(IrOpcode::IsInlineeOpcode(node->opcode()));
  Node* target = node->InputAt(JSCallOrConstructNode::TargetIndex());
  HeapObjectMatcher match(target);

  // This reducer can handle both normal function calls as well as constructor
  // calls whenever the target is a constant function object, as follows:
  //  - JSCall(target:constant, receiver, args..., vector)
  //  - JSConstruct(target:constant, new.target, args..., vector)
  if (match.HasResolvedValue() && match.Ref(broker()).IsJSFunction()) {
    JSFunctionRef function = match.Ref(broker()).AsJSFunction();
    // The function might not have been called yet.
    if (!function.has_feedback_vector()) {
      return base::nullopt;
    }

    // Disallow cross native-context inlining for now. This means that all
    // parts of the resulting code will operate on the same global object.
    // This also prevents cross context leaks, where we could inline functions
    // from a different context and hold on to that context (and closure) from
    // the code object.
    // TODO(turbofan): We might want to revisit this restriction later when we
    // have a need for this, and we know how to model different native contexts
    // in the same graph in a compositional way.
    if (!function.native_context().equals(broker()->target_native_context())) {
      return base::nullopt;
    }

    return function.shared();
  }

  // This reducer can also handle calls where the target is statically known to
  // be the result of a closure instantiation operation, as follows:
  //  - JSCall(JSCreateClosure[shared](context), receiver, args..., vector)
  //  - JSConstruct(JSCreateClosure[shared](context),
  //                new.target, args..., vector)
  if (match.IsJSCreateClosure()) {
    JSCreateClosureNode n(target);
    FeedbackCellRef cell = n.GetFeedbackCellRefChecked(broker());
    return cell.shared_function_info();
  } else if (match.IsCheckClosure()) {
    FeedbackCellRef cell(broker(), FeedbackCellOf(match.op()));
    return cell.shared_function_info();
  }

  return base::nullopt;
}

// Determines statically known information about the call target (assuming that
// the call target is known according to {DetermineCallTarget} above). The
// following static information is provided:
//  - context         : The context (as SSA value) bound by the call target.
//  - feedback_vector : The target is guaranteed to use this feedback vector.
FeedbackCellRef JSInliner::DetermineCallContext(Node* node,
                                                Node** context_out) {
  DCHECK(IrOpcode::IsInlineeOpcode(node->opcode()));
  Node* target = node->InputAt(JSCallOrConstructNode::TargetIndex());
  HeapObjectMatcher match(target);

  if (match.HasResolvedValue() && match.Ref(broker()).IsJSFunction()) {
    JSFunctionRef function = match.Ref(broker()).AsJSFunction();
    // This was already ensured by DetermineCallTarget
    CHECK(function.has_feedback_vector());

    // The inlinee specializes to the context from the JSFunction object.
    *context_out = jsgraph()->Constant(function.context());
    return function.raw_feedback_cell();
  }

  if (match.IsJSCreateClosure()) {
    // Load the feedback vector of the target by looking up its vector cell at
    // the instantiation site (we only decide to inline if it's populated).
    JSCreateClosureNode n(target);
    FeedbackCellRef cell = n.GetFeedbackCellRefChecked(broker());

    // The inlinee uses the locally provided context at instantiation.
    *context_out = NodeProperties::GetContextInput(match.node());
    return cell;
  } else if (match.IsCheckClosure()) {
    FeedbackCellRef cell(broker(), FeedbackCellOf(match.op()));

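    // The closure is only known via a {CheckClosure} node, so the context is
    // not a compile-time constant; load it from the closure object instead.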
    Node* effect = NodeProperties::GetEffectInput(node);
    Node* control = NodeProperties::GetControlInput(node);
    *context_out = effect = graph()->NewNode(
        simplified()->LoadField(AccessBuilder::ForJSFunctionContext()),
        match.node(), effect, control);
    NodeProperties::ReplaceEffectInput(node, effect);

    return cell;
  }

  // Must succeed.
  UNREACHABLE();
}

Reduction JSInliner::ReduceJSCall(Node* node) {
  DCHECK(IrOpcode::IsInlineeOpcode(node->opcode()));
  JSCallAccessor call(node);

  // Determine the call target.
  base::Optional<SharedFunctionInfoRef> shared_info(DetermineCallTarget(node));
  if (!shared_info.has_value()) return NoChange();
  DCHECK(shared_info->IsInlineable());

  SharedFunctionInfoRef outer_shared_info(broker(), info_->shared_info());

  // Constructor must be constructable.
  if (node->opcode() == IrOpcode::kJSConstruct &&
      !IsConstructable(shared_info->kind())) {
    TRACE("Not inlining " << *shared_info << " into " << outer_shared_info
                          << " because constructor is not constructable.");
    return NoChange();
  }

  // Class constructors are callable, but [[Call]] will raise an exception.
  // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList ).
  if (node->opcode() == IrOpcode::kJSCall &&
      IsClassConstructor(shared_info->kind())) {
    TRACE("Not inlining " << *shared_info << " into " << outer_shared_info
                          << " because callee is a class constructor.");
    return NoChange();
  }

  // To ensure that inlining always terminates, we impose an upper limit on the
  // depth of nested inlining.
  int nesting_level = 0;
  for (Node* frame_state = call.frame_state();
       frame_state->opcode() == IrOpcode::kFrameState;
       frame_state = frame_state->InputAt(kFrameStateOuterStateInput)) {
    nesting_level++;
    if (nesting_level > kMaxDepthForInlining) {
      TRACE("Not inlining "
            << *shared_info << " into " << outer_shared_info
            << " because call has exceeded the maximum depth for function "
               "inlining.");
      return NoChange();
    }
  }

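  // Determine whether the call site has an associated exception handler, i.e.
  // whether it sits inside a try-block; if so, uncaught calls in the inlinee
  // are wired into that handler further below.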
  Node* exception_target = nullptr;
  NodeProperties::IsExceptionalCall(node, &exception_target);

  // JSInliningHeuristic has already filtered candidates without a
  // BytecodeArray based on SharedFunctionInfoRef::GetInlineability. For the
  // inlineable ones (kIsInlineable), the broker holds a reference to the
  // bytecode array, which prevents it from getting flushed. Therefore, the
  // following check should always hold true.
  CHECK(shared_info->is_compiled());

  if (!broker()->is_concurrent_inlining() && info_->source_positions()) {
    SharedFunctionInfo::EnsureSourcePositionsAvailable(isolate(),
                                                       shared_info->object());
  }

  TRACE("Inlining " << *shared_info << " into " << outer_shared_info
                    << ((exception_target != nullptr) ? " (inside try-block)"
                                                      : ""));
  // Determine the target's feedback vector and its context.
  Node* context;
  FeedbackCellRef feedback_cell = DetermineCallContext(node, &context);
  CHECK(broker()->IsSerializedForCompilation(
      *shared_info, feedback_cell.value().AsFeedbackVector()));

  // ----------------------------------------------------------------
  // After this point, we've made a decision to inline this function.
  // We shall not bail out from inlining if we got here.

  BytecodeArrayRef bytecode_array = shared_info->GetBytecodeArray();

  // Remember that we inlined this function.
  int inlining_id =
      info_->AddInlinedFunction(shared_info->object(), bytecode_array.object(),
                                source_positions_->GetSourcePosition(node));

  // Create the subgraph for the inlinee.
  Node* start;
  Node* end;
  {
    // Run the BytecodeGraphBuilder to create the subgraph.
    Graph::SubgraphScope scope(graph());
    BytecodeGraphBuilderFlags flags(
        BytecodeGraphBuilderFlag::kSkipFirstStackCheck);
    if (info_->analyze_environment_liveness()) {
      flags |= BytecodeGraphBuilderFlag::kAnalyzeEnvironmentLiveness;
    }
    if (info_->bailout_on_uninitialized()) {
      flags |= BytecodeGraphBuilderFlag::kBailoutOnUninitialized;
    }
    {
      CallFrequency frequency = call.frequency();
      BuildGraphFromBytecode(broker(), zone(), *shared_info, feedback_cell,
                             BailoutId::None(), jsgraph(), frequency,
                             source_positions_, inlining_id, info_->code_kind(),
                             flags, &info_->tick_counter());
    }

    // Extract the inlinee start/end nodes.
    start = graph()->start();
    end = graph()->end();
  }

  // If we are inlining into a surrounding exception handler, we collect all
  // potentially throwing nodes within the inlinee that are not handled locally
  // by the inlinee itself. They are later wired into the surrounding handler.
  NodeVector uncaught_subcalls(local_zone_);
  if (exception_target != nullptr) {
    // Find all uncaught 'calls' in the inlinee.
    AllNodes inlined_nodes(local_zone_, end, graph());
    for (Node* subnode : inlined_nodes.reachable) {
      // Every possibly throwing node should get {IfSuccess} and {IfException}
      // projections, unless there already is local exception handling.
      if (subnode->op()->HasProperty(Operator::kNoThrow)) continue;
      if (!NodeProperties::IsExceptionalCall(subnode)) {
        DCHECK_EQ(2, subnode->op()->ControlOutputCount());
        uncaught_subcalls.push_back(subnode);
      }
    }
  }

  Node* frame_state = call.frame_state();
  Node* new_target = jsgraph()->UndefinedConstant();

  // Inlining a {JSConstruct} node requires some additional magic.
  if (node->opcode() == IrOpcode::kJSConstruct) {
    STATIC_ASSERT(JSCallOrConstructNode::kHaveIdenticalLayouts);
    JSConstructNode n(node);

    new_target = n.new_target();

    // Insert nodes around the call that model the behavior required for a
    // constructor dispatch (allocate implicit receiver and check return
    // value). This models the behavior usually accomplished by our
    // {JSConstructStub}. Note that the context has to be the caller's context
    // (input to the call node). Also note that by splitting off the {JSCreate}
    // piece of the constructor call, we create an observable deoptimization
    // point after the receiver instantiation but before the invocation (i.e.
    // inside {JSConstructStub} where execution continues at
    // {construct_stub_create_deopt_pc_offset}).
    Node* receiver = jsgraph()->TheHoleConstant();  // Implicit receiver.
    Node* context = NodeProperties::GetContextInput(node);
    if (NeedsImplicitReceiver(*shared_info)) {
      Effect effect = n.effect();
      Control control = n.control();
      Node* frame_state_inside = CreateArtificialFrameState(
          node, frame_state, n.ArgumentCount(),
          BailoutId::ConstructStubCreate(), FrameStateType::kConstructStub,
          *shared_info, context);
      Node* create =
          graph()->NewNode(javascript()->Create(), call.target(), new_target,
                           context, frame_state_inside, effect, control);
      uncaught_subcalls.push_back(create);  // Adds {IfSuccess} & {IfException}.
      NodeProperties::ReplaceControlInput(node, create);
      NodeProperties::ReplaceEffectInput(node, create);
      // Placeholder to hold {node}'s value dependencies while {node} is
      // replaced.
      Node* dummy = graph()->NewNode(common()->Dead());
      NodeProperties::ReplaceUses(node, dummy, node, node, node);
      Node* result;
      // Insert a check of the return value to determine whether the return
      // value or the implicit receiver should be selected as a result of the
      // call.
      Node* check = graph()->NewNode(simplified()->ObjectIsReceiver(), node);
      result =
          graph()->NewNode(common()->Select(MachineRepresentation::kTagged),
                           check, node, create);
      receiver = create;  // The implicit receiver.
      ReplaceWithValue(dummy, result);
    } else if (IsDerivedConstructor(shared_info->kind())) {
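      // Derived constructors do not allocate an implicit receiver here;
      // instead, verify that the value they return is a receiver and call
      // the {ThrowConstructorReturnedNonObject} runtime function otherwise.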
      Node* node_success =
          NodeProperties::FindSuccessfulControlProjection(node);
      Node* is_receiver =
          graph()->NewNode(simplified()->ObjectIsReceiver(), node);
      Node* branch_is_receiver =
          graph()->NewNode(common()->Branch(), is_receiver, node_success);
      Node* branch_is_receiver_true =
          graph()->NewNode(common()->IfTrue(), branch_is_receiver);
      Node* branch_is_receiver_false =
          graph()->NewNode(common()->IfFalse(), branch_is_receiver);
      branch_is_receiver_false =
          graph()->NewNode(javascript()->CallRuntime(
                               Runtime::kThrowConstructorReturnedNonObject),
                           context, NodeProperties::GetFrameStateInput(node),
                           node, branch_is_receiver_false);
      uncaught_subcalls.push_back(branch_is_receiver_false);
      branch_is_receiver_false =
          graph()->NewNode(common()->Throw(), branch_is_receiver_false,
                           branch_is_receiver_false);
      NodeProperties::MergeControlToEnd(graph(), common(),
                                        branch_is_receiver_false);

      ReplaceWithValue(node_success, node_success, node_success,
                       branch_is_receiver_true);
      // Fix input destroyed by the above {ReplaceWithValue} call.
      NodeProperties::ReplaceControlInput(branch_is_receiver, node_success, 0);
    }
    node->ReplaceInput(JSCallNode::ReceiverIndex(), receiver);
    // Insert a construct stub frame into the chain of frame states. This will
    // reconstruct the proper frame when deoptimizing within the constructor.
    frame_state = CreateArtificialFrameState(
        node, frame_state, n.ArgumentCount(), BailoutId::ConstructStubInvoke(),
        FrameStateType::kConstructStub, *shared_info, context);
  }

  // Insert a {ConvertReceiver} node for sloppy callees. Note that the context
  // passed into this node has to be the callee's context (loaded above).
  if (node->opcode() == IrOpcode::kJSCall &&
      is_sloppy(shared_info->language_mode()) && !shared_info->native()) {
    Node* effect = NodeProperties::GetEffectInput(node);
    if (NodeProperties::CanBePrimitive(broker(), call.receiver(), effect)) {
      CallParameters const& p = CallParametersOf(node->op());
      Node* global_proxy = jsgraph()->Constant(
          broker()->target_native_context().global_proxy_object());
      Node* receiver = effect =
          graph()->NewNode(simplified()->ConvertReceiver(p.convert_mode()),
                           call.receiver(), global_proxy, effect, start);
      NodeProperties::ReplaceValueInput(node, receiver,
                                        JSCallNode::ReceiverIndex());
      NodeProperties::ReplaceEffectInput(node, effect);
    }
  }

  // Insert an argument adaptor frame if required. The callee's formal
  // parameter count (i.e. value outputs of the start node minus target,
  // receiver, new target, argument count and context) has to match the number
  // of arguments passed to the call.
  int parameter_count = shared_info->internal_formal_parameter_count();
  DCHECK_EQ(parameter_count, start->op()->ValueOutputCount() - 5);
  if (call.argument_count() != parameter_count) {
    frame_state = CreateArtificialFrameState(
        node, frame_state, call.argument_count(), BailoutId::None(),
        FrameStateType::kArgumentsAdaptor, *shared_info);
  }

  return InlineCall(node, new_target, context, frame_state, start, end,
                    exception_target, uncaught_subcalls);
}

Graph* JSInliner::graph() const { return jsgraph()->graph(); }

JSOperatorBuilder* JSInliner::javascript() const {
  return jsgraph()->javascript();
}

CommonOperatorBuilder* JSInliner::common() const { return jsgraph()->common(); }

SimplifiedOperatorBuilder* JSInliner::simplified() const {
  return jsgraph()->simplified();
}

#undef TRACE

}  // namespace compiler
}  // namespace internal
}  // namespace v8