1 // Copyright 2018 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "src/wasm/graph-builder-interface.h"
6
7 #include "src/compiler/wasm-compiler.h"
8 #include "src/flags/flags.h"
9 #include "src/handles/handles.h"
10 #include "src/objects/objects-inl.h"
11 #include "src/utils/ostreams.h"
12 #include "src/wasm/branch-hint-map.h"
13 #include "src/wasm/decoder.h"
14 #include "src/wasm/function-body-decoder-impl.h"
15 #include "src/wasm/function-body-decoder.h"
16 #include "src/wasm/value-type.h"
17 #include "src/wasm/wasm-limits.h"
18 #include "src/wasm/wasm-linkage.h"
19 #include "src/wasm/wasm-module.h"
20 #include "src/wasm/wasm-opcodes-inl.h"
21
22 namespace v8 {
23 namespace internal {
24 namespace wasm {
25
26 namespace {
27
28 // An SsaEnv environment carries the current local variable renaming
29 // as well as the current effect and control dependency in the TF graph.
30 // It maintains a control state that tracks whether the environment
31 // is reachable, has reached a control end, or has been merged.
32 struct SsaEnv : public ZoneObject {
33 enum State { kUnreachable, kReached, kMerged };
34
35 State state;
36 TFNode* control;
37 TFNode* effect;
38 compiler::WasmInstanceCacheNodes instance_cache;
39 ZoneVector<TFNode*> locals;
40
SsaEnvv8::internal::wasm::__anon66b31e6d0111::SsaEnv41 SsaEnv(Zone* zone, State state, TFNode* control, TFNode* effect,
42 uint32_t locals_size)
43 : state(state),
44 control(control),
45 effect(effect),
46 locals(locals_size, zone) {}
47
48 SsaEnv(const SsaEnv& other) V8_NOEXCEPT = default;
SsaEnvv8::internal::wasm::__anon66b31e6d0111::SsaEnv49 SsaEnv(SsaEnv&& other) V8_NOEXCEPT : state(other.state),
50 control(other.control),
51 effect(other.effect),
52 instance_cache(other.instance_cache),
53 locals(std::move(other.locals)) {
54 other.Kill();
55 }
56
Killv8::internal::wasm::__anon66b31e6d0111::SsaEnv57 void Kill() {
58 state = kUnreachable;
59 for (TFNode*& local : locals) {
60 local = nullptr;
61 }
62 control = nullptr;
63 effect = nullptr;
64 instance_cache = {};
65 }
SetNotMergedv8::internal::wasm::__anon66b31e6d0111::SsaEnv66 void SetNotMerged() {
67 if (state == kMerged) state = kReached;
68 }
69 };
70
71 class WasmGraphBuildingInterface {
72 public:
73 static constexpr Decoder::ValidateFlag validate = Decoder::kFullValidation;
74 using FullDecoder = WasmFullDecoder<validate, WasmGraphBuildingInterface>;
75 using CheckForNull = compiler::WasmGraphBuilder::CheckForNull;
76
77 struct Value : public ValueBase<validate> {
78 TFNode* node = nullptr;
79
80 template <typename... Args>
Valuev8::internal::wasm::__anon66b31e6d0111::WasmGraphBuildingInterface::Value81 explicit Value(Args&&... args) V8_NOEXCEPT
82 : ValueBase(std::forward<Args>(args)...) {}
83 };
84 using ValueVector = base::SmallVector<Value, 8>;
85 using NodeVector = base::SmallVector<TFNode*, 8>;
86
87 struct TryInfo : public ZoneObject {
88 SsaEnv* catch_env;
89 TFNode* exception = nullptr;
90
might_throwv8::internal::wasm::__anon66b31e6d0111::WasmGraphBuildingInterface::TryInfo91 bool might_throw() const { return exception != nullptr; }
92
93 MOVE_ONLY_NO_DEFAULT_CONSTRUCTOR(TryInfo);
94
TryInfov8::internal::wasm::__anon66b31e6d0111::WasmGraphBuildingInterface::TryInfo95 explicit TryInfo(SsaEnv* c) : catch_env(c) {}
96 };
97
98 struct Control : public ControlBase<Value, validate> {
99 SsaEnv* merge_env = nullptr; // merge environment for the construct.
100 SsaEnv* false_env = nullptr; // false environment (only for if).
101 TryInfo* try_info = nullptr; // information about try statements.
102 int32_t previous_catch = -1; // previous Control with a catch.
103 BitVector* loop_assignments = nullptr; // locals assigned in this loop.
104 TFNode* loop_node = nullptr; // loop header of this loop.
105 MOVE_ONLY_NO_DEFAULT_CONSTRUCTOR(Control);
106
107 template <typename... Args>
Controlv8::internal::wasm::__anon66b31e6d0111::WasmGraphBuildingInterface::Control108 explicit Control(Args&&... args) V8_NOEXCEPT
109 : ControlBase(std::forward<Args>(args)...) {}
110 };
111
WasmGraphBuildingInterface(compiler::WasmGraphBuilder * builder,int func_index,InlinedStatus inlined_status)112 WasmGraphBuildingInterface(compiler::WasmGraphBuilder* builder,
113 int func_index, InlinedStatus inlined_status)
114 : builder_(builder),
115 func_index_(func_index),
116 inlined_status_(inlined_status) {}
117
StartFunction(FullDecoder * decoder)118 void StartFunction(FullDecoder* decoder) {
119 // Get the branch hints map for this function (if available)
120 if (decoder->module_) {
121 auto branch_hints_it = decoder->module_->branch_hints.find(func_index_);
122 if (branch_hints_it != decoder->module_->branch_hints.end()) {
123 branch_hints_ = &branch_hints_it->second;
124 }
125 }
126 // The first '+ 1' is needed by TF Start node, the second '+ 1' is for the
127 // instance parameter.
128 builder_->Start(static_cast<int>(decoder->sig_->parameter_count() + 1 + 1));
129 uint32_t num_locals = decoder->num_locals();
130 SsaEnv* ssa_env = decoder->zone()->New<SsaEnv>(
131 decoder->zone(), SsaEnv::kReached, effect(), control(), num_locals);
132 SetEnv(ssa_env);
133
134 // Initialize local variables. Parameters are shifted by 1 because of the
135 // the instance parameter.
136 uint32_t index = 0;
137 for (; index < decoder->sig_->parameter_count(); ++index) {
138 ssa_env->locals[index] = builder_->Param(index + 1);
139 }
140 while (index < num_locals) {
141 ValueType type = decoder->local_type(index);
142 TFNode* node;
143 if ((decoder->enabled_.has_nn_locals() ||
144 decoder->enabled_.has_unsafe_nn_locals()) &&
145 !type.is_defaultable()) {
146 DCHECK(type.is_reference());
147 // TODO(jkummerow): Consider using "the hole" instead, to make any
148 // illegal uses more obvious.
149 node = builder_->RefNull();
150 } else {
151 node = DefaultValue(type);
152 }
153 while (index < num_locals && decoder->local_type(index) == type) {
154 // Do a whole run of like-typed locals at a time.
155 ssa_env->locals[index++] = node;
156 }
157 }
158 LoadContextIntoSsa(ssa_env);
159
160 if (FLAG_trace_wasm && inlined_status_ == kRegularFunction) {
161 builder_->TraceFunctionEntry(decoder->position());
162 }
163 }
164
165 // Reload the instance cache entries into the Ssa Environment.
LoadContextIntoSsa(SsaEnv * ssa_env)166 void LoadContextIntoSsa(SsaEnv* ssa_env) {
167 if (ssa_env) builder_->InitInstanceCache(&ssa_env->instance_cache);
168 }
169
StartFunctionBody(FullDecoder * decoder,Control * block)170 void StartFunctionBody(FullDecoder* decoder, Control* block) {}
171
FinishFunction(FullDecoder *)172 void FinishFunction(FullDecoder*) {
173 if (inlined_status_ == kRegularFunction) {
174 builder_->PatchInStackCheckIfNeeded();
175 }
176 }
177
OnFirstError(FullDecoder *)178 void OnFirstError(FullDecoder*) {}
179
NextInstruction(FullDecoder *,WasmOpcode)180 void NextInstruction(FullDecoder*, WasmOpcode) {}
181
Block(FullDecoder * decoder,Control * block)182 void Block(FullDecoder* decoder, Control* block) {
183 // The branch environment is the outer environment.
184 block->merge_env = ssa_env_;
185 SetEnv(Steal(decoder->zone(), ssa_env_));
186 }
187
Loop(FullDecoder * decoder,Control * block)188 void Loop(FullDecoder* decoder, Control* block) {
189 // This is the merge environment at the beginning of the loop.
190 SsaEnv* merge_env = Steal(decoder->zone(), ssa_env_);
191 block->merge_env = merge_env;
192 SetEnv(merge_env);
193
194 ssa_env_->state = SsaEnv::kMerged;
195
196 TFNode* loop_node = builder_->Loop(control());
197
198 if (emit_loop_exits()) {
199 uint32_t nesting_depth = 0;
200 for (uint32_t depth = 1; depth < decoder->control_depth(); depth++) {
201 if (decoder->control_at(depth)->is_loop()) {
202 nesting_depth++;
203 }
204 }
205 // If this loop is nested, the parent loop's is_innermost field needs to
206 // be false. If the last loop in loop_infos_ has less depth, it has to be
207 // the parent loop. If it does not, it means another loop has been found
208 // within the parent loop, and that loop will have set the parent's
209 // is_innermost to false, so we do not need to do anything.
210 if (nesting_depth > 0 &&
211 loop_infos_.back().nesting_depth < nesting_depth) {
212 loop_infos_.back().is_innermost = false;
213 }
214 loop_infos_.emplace_back(loop_node, nesting_depth, true);
215 }
216
217 builder_->SetControl(loop_node);
218 decoder->control_at(0)->loop_node = loop_node;
219
220 TFNode* effect_inputs[] = {effect(), control()};
221 builder_->SetEffect(builder_->EffectPhi(1, effect_inputs));
222 builder_->TerminateLoop(effect(), control());
223 // Doing a preprocessing pass to analyze loop assignments seems to pay off
224 // compared to reallocating Nodes when rearranging Phis in Goto.
225 BitVector* assigned = WasmDecoder<validate>::AnalyzeLoopAssignment(
226 decoder, decoder->pc(), decoder->num_locals(), decoder->zone());
227 if (decoder->failed()) return;
228 DCHECK_NOT_NULL(assigned);
229 decoder->control_at(0)->loop_assignments = assigned;
230
231 // Only introduce phis for variables assigned in this loop.
232 int instance_cache_index = decoder->num_locals();
233 for (int i = decoder->num_locals() - 1; i >= 0; i--) {
234 if (!assigned->Contains(i)) continue;
235 TFNode* inputs[] = {ssa_env_->locals[i], control()};
236 ssa_env_->locals[i] = builder_->Phi(decoder->local_type(i), 1, inputs);
237 }
238 // Introduce phis for instance cache pointers if necessary.
239 if (assigned->Contains(instance_cache_index)) {
240 builder_->PrepareInstanceCacheForLoop(&ssa_env_->instance_cache,
241 control());
242 }
243
244 // Now we setup a new environment for the inside of the loop.
245 SetEnv(Split(decoder->zone(), ssa_env_));
246 builder_->StackCheck(decoder->position());
247 ssa_env_->SetNotMerged();
248
249 // Wrap input merge into phis.
250 for (uint32_t i = 0; i < block->start_merge.arity; ++i) {
251 Value& val = block->start_merge[i];
252 TFNode* inputs[] = {val.node, block->merge_env->control};
253 val.node = builder_->Phi(val.type, 1, inputs);
254 }
255 }
256
Try(FullDecoder * decoder,Control * block)257 void Try(FullDecoder* decoder, Control* block) {
258 SsaEnv* outer_env = ssa_env_;
259 SsaEnv* catch_env = Split(decoder->zone(), outer_env);
260 // Mark catch environment as unreachable, since only accessable
261 // through catch unwinding (i.e. landing pads).
262 catch_env->state = SsaEnv::kUnreachable;
263 SsaEnv* try_env = Steal(decoder->zone(), outer_env);
264 SetEnv(try_env);
265 TryInfo* try_info = decoder->zone()->New<TryInfo>(catch_env);
266 block->merge_env = outer_env;
267 block->try_info = try_info;
268 }
269
If(FullDecoder * decoder,const Value & cond,Control * if_block)270 void If(FullDecoder* decoder, const Value& cond, Control* if_block) {
271 TFNode* if_true = nullptr;
272 TFNode* if_false = nullptr;
273 WasmBranchHint hint = WasmBranchHint::kNoHint;
274 if (branch_hints_) {
275 hint = branch_hints_->GetHintFor(decoder->pc_relative_offset());
276 }
277 switch (hint) {
278 case WasmBranchHint::kNoHint:
279 builder_->BranchNoHint(cond.node, &if_true, &if_false);
280 break;
281 case WasmBranchHint::kUnlikely:
282 builder_->BranchExpectFalse(cond.node, &if_true, &if_false);
283 break;
284 case WasmBranchHint::kLikely:
285 builder_->BranchExpectTrue(cond.node, &if_true, &if_false);
286 break;
287 }
288 SsaEnv* merge_env = ssa_env_;
289 SsaEnv* false_env = Split(decoder->zone(), ssa_env_);
290 false_env->control = if_false;
291 SsaEnv* true_env = Steal(decoder->zone(), ssa_env_);
292 true_env->control = if_true;
293 if_block->merge_env = merge_env;
294 if_block->false_env = false_env;
295 SetEnv(true_env);
296 }
297
FallThruTo(FullDecoder * decoder,Control * c)298 void FallThruTo(FullDecoder* decoder, Control* c) {
299 DCHECK(!c->is_loop());
300 MergeValuesInto(decoder, c, &c->end_merge);
301 }
302
PopControl(FullDecoder * decoder,Control * block)303 void PopControl(FullDecoder* decoder, Control* block) {
304 // A loop just continues with the end environment. There is no merge.
305 // However, if loop unrolling is enabled, we must create a loop exit and
306 // wrap the fallthru values on the stack.
307 if (block->is_loop()) {
308 if (emit_loop_exits() && block->reachable()) {
309 BuildLoopExits(decoder, block);
310 WrapLocalsAtLoopExit(decoder, block);
311 uint32_t arity = block->end_merge.arity;
312 if (arity > 0) {
313 Value* stack_base = decoder->stack_value(arity);
314 for (uint32_t i = 0; i < arity; i++) {
315 Value* val = stack_base + i;
316 val->node = builder_->LoopExitValue(
317 val->node, val->type.machine_representation());
318 }
319 }
320 }
321 return;
322 }
323 // Any other block falls through to the parent block.
324 if (block->reachable()) FallThruTo(decoder, block);
325 if (block->is_onearmed_if()) {
326 // Merge the else branch into the end merge.
327 SetEnv(block->false_env);
328 DCHECK_EQ(block->start_merge.arity, block->end_merge.arity);
329 Value* values =
330 block->start_merge.arity > 0 ? &block->start_merge[0] : nullptr;
331 MergeValuesInto(decoder, block, &block->end_merge, values);
332 }
333 // Now continue with the merged environment.
334 SetEnv(block->merge_env);
335 }
336
UnOp(FullDecoder * decoder,WasmOpcode opcode,const Value & value,Value * result)337 void UnOp(FullDecoder* decoder, WasmOpcode opcode, const Value& value,
338 Value* result) {
339 result->node = builder_->Unop(opcode, value.node, decoder->position());
340 }
341
BinOp(FullDecoder * decoder,WasmOpcode opcode,const Value & lhs,const Value & rhs,Value * result)342 void BinOp(FullDecoder* decoder, WasmOpcode opcode, const Value& lhs,
343 const Value& rhs, Value* result) {
344 TFNode* node =
345 builder_->Binop(opcode, lhs.node, rhs.node, decoder->position());
346 if (result) result->node = node;
347 }
348
I32Const(FullDecoder * decoder,Value * result,int32_t value)349 void I32Const(FullDecoder* decoder, Value* result, int32_t value) {
350 result->node = builder_->Int32Constant(value);
351 }
352
I64Const(FullDecoder * decoder,Value * result,int64_t value)353 void I64Const(FullDecoder* decoder, Value* result, int64_t value) {
354 result->node = builder_->Int64Constant(value);
355 }
356
F32Const(FullDecoder * decoder,Value * result,float value)357 void F32Const(FullDecoder* decoder, Value* result, float value) {
358 result->node = builder_->Float32Constant(value);
359 }
360
F64Const(FullDecoder * decoder,Value * result,double value)361 void F64Const(FullDecoder* decoder, Value* result, double value) {
362 result->node = builder_->Float64Constant(value);
363 }
364
S128Const(FullDecoder * decoder,const Simd128Immediate<validate> & imm,Value * result)365 void S128Const(FullDecoder* decoder, const Simd128Immediate<validate>& imm,
366 Value* result) {
367 result->node = builder_->Simd128Constant(imm.value);
368 }
369
RefNull(FullDecoder * decoder,ValueType type,Value * result)370 void RefNull(FullDecoder* decoder, ValueType type, Value* result) {
371 result->node = builder_->RefNull();
372 }
373
RefFunc(FullDecoder * decoder,uint32_t function_index,Value * result)374 void RefFunc(FullDecoder* decoder, uint32_t function_index, Value* result) {
375 result->node = builder_->RefFunc(function_index);
376 }
377
RefAsNonNull(FullDecoder * decoder,const Value & arg,Value * result)378 void RefAsNonNull(FullDecoder* decoder, const Value& arg, Value* result) {
379 result->node = builder_->RefAsNonNull(arg.node, decoder->position());
380 }
381
Drop(FullDecoder * decoder)382 void Drop(FullDecoder* decoder) {}
383
LocalGet(FullDecoder * decoder,Value * result,const IndexImmediate<validate> & imm)384 void LocalGet(FullDecoder* decoder, Value* result,
385 const IndexImmediate<validate>& imm) {
386 result->node = ssa_env_->locals[imm.index];
387 }
388
LocalSet(FullDecoder * decoder,const Value & value,const IndexImmediate<validate> & imm)389 void LocalSet(FullDecoder* decoder, const Value& value,
390 const IndexImmediate<validate>& imm) {
391 ssa_env_->locals[imm.index] = value.node;
392 }
393
LocalTee(FullDecoder * decoder,const Value & value,Value * result,const IndexImmediate<validate> & imm)394 void LocalTee(FullDecoder* decoder, const Value& value, Value* result,
395 const IndexImmediate<validate>& imm) {
396 result->node = value.node;
397 ssa_env_->locals[imm.index] = value.node;
398 }
399
AllocateLocals(FullDecoder * decoder,base::Vector<Value> local_values)400 void AllocateLocals(FullDecoder* decoder, base::Vector<Value> local_values) {
401 ZoneVector<TFNode*>* locals = &ssa_env_->locals;
402 locals->insert(locals->begin(), local_values.size(), nullptr);
403 for (uint32_t i = 0; i < local_values.size(); i++) {
404 (*locals)[i] = local_values[i].node;
405 }
406 }
407
DeallocateLocals(FullDecoder * decoder,uint32_t count)408 void DeallocateLocals(FullDecoder* decoder, uint32_t count) {
409 ZoneVector<TFNode*>* locals = &ssa_env_->locals;
410 locals->erase(locals->begin(), locals->begin() + count);
411 }
412
GlobalGet(FullDecoder * decoder,Value * result,const GlobalIndexImmediate<validate> & imm)413 void GlobalGet(FullDecoder* decoder, Value* result,
414 const GlobalIndexImmediate<validate>& imm) {
415 result->node = builder_->GlobalGet(imm.index);
416 }
417
GlobalSet(FullDecoder * decoder,const Value & value,const GlobalIndexImmediate<validate> & imm)418 void GlobalSet(FullDecoder* decoder, const Value& value,
419 const GlobalIndexImmediate<validate>& imm) {
420 builder_->GlobalSet(imm.index, value.node);
421 }
422
TableGet(FullDecoder * decoder,const Value & index,Value * result,const IndexImmediate<validate> & imm)423 void TableGet(FullDecoder* decoder, const Value& index, Value* result,
424 const IndexImmediate<validate>& imm) {
425 result->node =
426 builder_->TableGet(imm.index, index.node, decoder->position());
427 }
428
TableSet(FullDecoder * decoder,const Value & index,const Value & value,const IndexImmediate<validate> & imm)429 void TableSet(FullDecoder* decoder, const Value& index, const Value& value,
430 const IndexImmediate<validate>& imm) {
431 builder_->TableSet(imm.index, index.node, value.node, decoder->position());
432 }
433
Trap(FullDecoder * decoder,TrapReason reason)434 void Trap(FullDecoder* decoder, TrapReason reason) {
435 ValueVector values;
436 if (emit_loop_exits()) {
437 BuildNestedLoopExits(decoder, decoder->control_depth() - 1, false,
438 values);
439 }
440 builder_->Trap(reason, decoder->position());
441 }
442
AssertNull(FullDecoder * decoder,const Value & obj,Value * result)443 void AssertNull(FullDecoder* decoder, const Value& obj, Value* result) {
444 builder_->TrapIfFalse(
445 wasm::TrapReason::kTrapIllegalCast,
446 builder_->Binop(kExprRefEq, obj.node, builder_->RefNull(),
447 decoder->position()),
448 decoder->position());
449 result->node = obj.node;
450 }
451
NopForTestingUnsupportedInLiftoff(FullDecoder * decoder)452 void NopForTestingUnsupportedInLiftoff(FullDecoder* decoder) {}
453
Select(FullDecoder * decoder,const Value & cond,const Value & fval,const Value & tval,Value * result)454 void Select(FullDecoder* decoder, const Value& cond, const Value& fval,
455 const Value& tval, Value* result) {
456 result->node =
457 builder_->Select(cond.node, tval.node, fval.node, result->type);
458 }
459
CopyStackValues(FullDecoder * decoder,uint32_t count,uint32_t drop_values)460 ValueVector CopyStackValues(FullDecoder* decoder, uint32_t count,
461 uint32_t drop_values) {
462 Value* stack_base =
463 count > 0 ? decoder->stack_value(count + drop_values) : nullptr;
464 ValueVector stack_values(count);
465 for (uint32_t i = 0; i < count; i++) {
466 stack_values[i] = stack_base[i];
467 }
468 return stack_values;
469 }
470
DoReturn(FullDecoder * decoder,uint32_t drop_values)471 void DoReturn(FullDecoder* decoder, uint32_t drop_values) {
472 uint32_t ret_count = static_cast<uint32_t>(decoder->sig_->return_count());
473 NodeVector values(ret_count);
474 SsaEnv* internal_env = ssa_env_;
475 if (emit_loop_exits()) {
476 SsaEnv* exit_env = Split(decoder->zone(), ssa_env_);
477 SetEnv(exit_env);
478 auto stack_values = CopyStackValues(decoder, ret_count, drop_values);
479 BuildNestedLoopExits(decoder, decoder->control_depth() - 1, false,
480 stack_values);
481 GetNodes(values.begin(), base::VectorOf(stack_values));
482 } else {
483 Value* stack_base = ret_count == 0
484 ? nullptr
485 : decoder->stack_value(ret_count + drop_values);
486 GetNodes(values.begin(), stack_base, ret_count);
487 }
488 if (FLAG_trace_wasm && inlined_status_ == kRegularFunction) {
489 builder_->TraceFunctionExit(base::VectorOf(values), decoder->position());
490 }
491 builder_->Return(base::VectorOf(values));
492 SetEnv(internal_env);
493 }
494
BrOrRet(FullDecoder * decoder,uint32_t depth,uint32_t drop_values)495 void BrOrRet(FullDecoder* decoder, uint32_t depth, uint32_t drop_values) {
496 if (depth == decoder->control_depth() - 1) {
497 DoReturn(decoder, drop_values);
498 } else {
499 Control* target = decoder->control_at(depth);
500 if (emit_loop_exits()) {
501 SsaEnv* internal_env = ssa_env_;
502 SsaEnv* exit_env = Split(decoder->zone(), ssa_env_);
503 SetEnv(exit_env);
504 uint32_t value_count = target->br_merge()->arity;
505 auto stack_values = CopyStackValues(decoder, value_count, drop_values);
506 BuildNestedLoopExits(decoder, depth, true, stack_values);
507 MergeValuesInto(decoder, target, target->br_merge(),
508 stack_values.data());
509 SetEnv(internal_env);
510 } else {
511 MergeValuesInto(decoder, target, target->br_merge(), drop_values);
512 }
513 }
514 }
515
BrIf(FullDecoder * decoder,const Value & cond,uint32_t depth)516 void BrIf(FullDecoder* decoder, const Value& cond, uint32_t depth) {
517 SsaEnv* fenv = ssa_env_;
518 SsaEnv* tenv = Split(decoder->zone(), fenv);
519 fenv->SetNotMerged();
520 WasmBranchHint hint = WasmBranchHint::kNoHint;
521 if (branch_hints_) {
522 hint = branch_hints_->GetHintFor(decoder->pc_relative_offset());
523 }
524 switch (hint) {
525 case WasmBranchHint::kNoHint:
526 builder_->BranchNoHint(cond.node, &tenv->control, &fenv->control);
527 break;
528 case WasmBranchHint::kUnlikely:
529 builder_->BranchExpectFalse(cond.node, &tenv->control, &fenv->control);
530 break;
531 case WasmBranchHint::kLikely:
532 builder_->BranchExpectTrue(cond.node, &tenv->control, &fenv->control);
533 break;
534 }
535 builder_->SetControl(fenv->control);
536 SetEnv(tenv);
537 BrOrRet(decoder, depth, 1);
538 SetEnv(fenv);
539 }
540
BrTable(FullDecoder * decoder,const BranchTableImmediate<validate> & imm,const Value & key)541 void BrTable(FullDecoder* decoder, const BranchTableImmediate<validate>& imm,
542 const Value& key) {
543 if (imm.table_count == 0) {
544 // Only a default target. Do the equivalent of br.
545 uint32_t target = BranchTableIterator<validate>(decoder, imm).next();
546 BrOrRet(decoder, target, 1);
547 return;
548 }
549
550 SsaEnv* branch_env = ssa_env_;
551 // Build branches to the various blocks based on the table.
552 TFNode* sw = builder_->Switch(imm.table_count + 1, key.node);
553
554 SsaEnv* copy = Steal(decoder->zone(), branch_env);
555 SetEnv(copy);
556 BranchTableIterator<validate> iterator(decoder, imm);
557 while (iterator.has_next()) {
558 uint32_t i = iterator.cur_index();
559 uint32_t target = iterator.next();
560 SetEnv(Split(decoder->zone(), copy));
561 builder_->SetControl(i == imm.table_count ? builder_->IfDefault(sw)
562 : builder_->IfValue(i, sw));
563 BrOrRet(decoder, target, 1);
564 }
565 DCHECK(decoder->ok());
566 SetEnv(branch_env);
567 }
568
Else(FullDecoder * decoder,Control * if_block)569 void Else(FullDecoder* decoder, Control* if_block) {
570 if (if_block->reachable()) {
571 // Merge the if branch into the end merge.
572 MergeValuesInto(decoder, if_block, &if_block->end_merge);
573 }
574 SetEnv(if_block->false_env);
575 }
576
LoadMem(FullDecoder * decoder,LoadType type,const MemoryAccessImmediate<validate> & imm,const Value & index,Value * result)577 void LoadMem(FullDecoder* decoder, LoadType type,
578 const MemoryAccessImmediate<validate>& imm, const Value& index,
579 Value* result) {
580 result->node =
581 builder_->LoadMem(type.value_type(), type.mem_type(), index.node,
582 imm.offset, imm.alignment, decoder->position());
583 }
584
LoadTransform(FullDecoder * decoder,LoadType type,LoadTransformationKind transform,const MemoryAccessImmediate<validate> & imm,const Value & index,Value * result)585 void LoadTransform(FullDecoder* decoder, LoadType type,
586 LoadTransformationKind transform,
587 const MemoryAccessImmediate<validate>& imm,
588 const Value& index, Value* result) {
589 result->node = builder_->LoadTransform(type.value_type(), type.mem_type(),
590 transform, index.node, imm.offset,
591 imm.alignment, decoder->position());
592 }
593
LoadLane(FullDecoder * decoder,LoadType type,const Value & value,const Value & index,const MemoryAccessImmediate<validate> & imm,const uint8_t laneidx,Value * result)594 void LoadLane(FullDecoder* decoder, LoadType type, const Value& value,
595 const Value& index, const MemoryAccessImmediate<validate>& imm,
596 const uint8_t laneidx, Value* result) {
597 result->node = builder_->LoadLane(
598 type.value_type(), type.mem_type(), value.node, index.node, imm.offset,
599 imm.alignment, laneidx, decoder->position());
600 }
601
StoreMem(FullDecoder * decoder,StoreType type,const MemoryAccessImmediate<validate> & imm,const Value & index,const Value & value)602 void StoreMem(FullDecoder* decoder, StoreType type,
603 const MemoryAccessImmediate<validate>& imm, const Value& index,
604 const Value& value) {
605 builder_->StoreMem(type.mem_rep(), index.node, imm.offset, imm.alignment,
606 value.node, decoder->position(), type.value_type());
607 }
608
StoreLane(FullDecoder * decoder,StoreType type,const MemoryAccessImmediate<validate> & imm,const Value & index,const Value & value,const uint8_t laneidx)609 void StoreLane(FullDecoder* decoder, StoreType type,
610 const MemoryAccessImmediate<validate>& imm, const Value& index,
611 const Value& value, const uint8_t laneidx) {
612 builder_->StoreLane(type.mem_rep(), index.node, imm.offset, imm.alignment,
613 value.node, laneidx, decoder->position(),
614 type.value_type());
615 }
616
CurrentMemoryPages(FullDecoder * decoder,Value * result)617 void CurrentMemoryPages(FullDecoder* decoder, Value* result) {
618 result->node = builder_->CurrentMemoryPages();
619 }
620
MemoryGrow(FullDecoder * decoder,const Value & value,Value * result)621 void MemoryGrow(FullDecoder* decoder, const Value& value, Value* result) {
622 result->node = builder_->MemoryGrow(value.node);
623 // Always reload the instance cache after growing memory.
624 LoadContextIntoSsa(ssa_env_);
625 }
626
CallDirect(FullDecoder * decoder,const CallFunctionImmediate<validate> & imm,const Value args[],Value returns[])627 void CallDirect(FullDecoder* decoder,
628 const CallFunctionImmediate<validate>& imm,
629 const Value args[], Value returns[]) {
630 DoCall(decoder, CallInfo::CallDirect(imm.index), imm.sig, args, returns);
631 }
632
ReturnCall(FullDecoder * decoder,const CallFunctionImmediate<validate> & imm,const Value args[])633 void ReturnCall(FullDecoder* decoder,
634 const CallFunctionImmediate<validate>& imm,
635 const Value args[]) {
636 DoReturnCall(decoder, CallInfo::CallDirect(imm.index), imm.sig, args);
637 }
638
CallIndirect(FullDecoder * decoder,const Value & index,const CallIndirectImmediate<validate> & imm,const Value args[],Value returns[])639 void CallIndirect(FullDecoder* decoder, const Value& index,
640 const CallIndirectImmediate<validate>& imm,
641 const Value args[], Value returns[]) {
642 DoCall(
643 decoder,
644 CallInfo::CallIndirect(index, imm.table_imm.index, imm.sig_imm.index),
645 imm.sig, args, returns);
646 }
647
ReturnCallIndirect(FullDecoder * decoder,const Value & index,const CallIndirectImmediate<validate> & imm,const Value args[])648 void ReturnCallIndirect(FullDecoder* decoder, const Value& index,
649 const CallIndirectImmediate<validate>& imm,
650 const Value args[]) {
651 DoReturnCall(
652 decoder,
653 CallInfo::CallIndirect(index, imm.table_imm.index, imm.sig_imm.index),
654 imm.sig, args);
655 }
656
CallRef(FullDecoder * decoder,const Value & func_ref,const FunctionSig * sig,uint32_t sig_index,const Value args[],Value returns[])657 void CallRef(FullDecoder* decoder, const Value& func_ref,
658 const FunctionSig* sig, uint32_t sig_index, const Value args[],
659 Value returns[]) {
660 if (!FLAG_wasm_inlining) {
661 DoCall(decoder, CallInfo::CallRef(func_ref, NullCheckFor(func_ref.type)),
662 sig, args, returns);
663 return;
664 }
665
666 // Check for equality against a function at a specific index, and if
667 // successful, just emit a direct call.
668 // TODO(12166): For now, we check against function 0. Decide the index based
669 // on liftoff feedback.
670 const uint32_t expected_function_index = 0;
671
672 TFNode* success_control;
673 TFNode* failure_control;
674 builder_->CompareToExternalFunctionAtIndex(
675 func_ref.node, expected_function_index, &success_control,
676 &failure_control);
677 TFNode* initial_effect = effect();
678
679 builder_->SetControl(success_control);
680 ssa_env_->control = success_control;
681 Value* returns_direct =
682 decoder->zone()->NewArray<Value>(sig->return_count());
683 DoCall(decoder, CallInfo::CallDirect(expected_function_index),
684 decoder->module_->signature(sig_index), args, returns_direct);
685 TFNode* control_direct = control();
686 TFNode* effect_direct = effect();
687
688 builder_->SetEffectControl(initial_effect, failure_control);
689 ssa_env_->effect = initial_effect;
690 ssa_env_->control = failure_control;
691 Value* returns_ref = decoder->zone()->NewArray<Value>(sig->return_count());
692 DoCall(decoder, CallInfo::CallRef(func_ref, NullCheckFor(func_ref.type)),
693 sig, args, returns_ref);
694
695 TFNode* control_ref = control();
696 TFNode* effect_ref = effect();
697
698 TFNode* control_args[] = {control_direct, control_ref};
699 TFNode* control = builder_->Merge(2, control_args);
700
701 TFNode* effect_args[] = {effect_direct, effect_ref, control};
702 TFNode* effect = builder_->EffectPhi(2, effect_args);
703
704 ssa_env_->control = control;
705 ssa_env_->effect = effect;
706 builder_->SetEffectControl(effect, control);
707
708 for (uint32_t i = 0; i < sig->return_count(); i++) {
709 TFNode* phi_args[] = {returns_direct[i].node, returns_ref[i].node,
710 control};
711 returns[i].node = builder_->Phi(sig->GetReturn(i), 2, phi_args);
712 }
713 }
714
ReturnCallRef(FullDecoder * decoder,const Value & func_ref,const FunctionSig * sig,uint32_t sig_index,const Value args[])715 void ReturnCallRef(FullDecoder* decoder, const Value& func_ref,
716 const FunctionSig* sig, uint32_t sig_index,
717 const Value args[]) {
718 if (!FLAG_wasm_inlining) {
719 DoReturnCall(decoder,
720 CallInfo::CallRef(func_ref, NullCheckFor(func_ref.type)),
721 sig, args);
722 return;
723 }
724
725 // Check for equality against a function at a specific index, and if
726 // successful, just emit a direct call.
727 // TODO(12166): For now, we check against function 0. Decide the index based
728 // on liftoff feedback.
729 const uint32_t expected_function_index = 0;
730
731 TFNode* success_control;
732 TFNode* failure_control;
733 builder_->CompareToExternalFunctionAtIndex(
734 func_ref.node, expected_function_index, &success_control,
735 &failure_control);
736 TFNode* initial_effect = effect();
737
738 builder_->SetControl(success_control);
739 ssa_env_->control = success_control;
740 DoReturnCall(decoder, CallInfo::CallDirect(expected_function_index), sig,
741 args);
742
743 builder_->SetEffectControl(initial_effect, failure_control);
744 ssa_env_->effect = initial_effect;
745 ssa_env_->control = failure_control;
746 DoReturnCall(decoder,
747 CallInfo::CallRef(func_ref, NullCheckFor(func_ref.type)), sig,
748 args);
749 }
750
  // Graph-builder hook for `br_on_null`: takes the branch to {depth} on the
  // null path, and falls through on the non-null path.
  void BrOnNull(FullDecoder* decoder, const Value& ref_object, uint32_t depth) {
    SsaEnv* false_env = ssa_env_;
    SsaEnv* true_env = Split(decoder->zone(), false_env);
    false_env->SetNotMerged();
    // First out-control is the "is null" successor.
    builder_->BrOnNull(ref_object.node, &true_env->control,
                       &false_env->control);
    builder_->SetControl(false_env->control);
    SetEnv(true_env);
    BrOrRet(decoder, depth, 1);
    SetEnv(false_env);
  }
762
  // Graph-builder hook for `br_on_non_null`: same null check as {BrOnNull},
  // but the branch is taken on the non-null path (note the swapped control
  // outputs).
  void BrOnNonNull(FullDecoder* decoder, const Value& ref_object,
                   uint32_t depth) {
    SsaEnv* false_env = ssa_env_;
    SsaEnv* true_env = Split(decoder->zone(), false_env);
    false_env->SetNotMerged();
    builder_->BrOnNull(ref_object.node, &false_env->control,
                       &true_env->control);
    builder_->SetControl(false_env->control);
    SetEnv(true_env);
    BrOrRet(decoder, depth, 0);
    SetEnv(false_env);
  }
775
SimdOp(FullDecoder * decoder,WasmOpcode opcode,base::Vector<Value> args,Value * result)776 void SimdOp(FullDecoder* decoder, WasmOpcode opcode, base::Vector<Value> args,
777 Value* result) {
778 NodeVector inputs(args.size());
779 GetNodes(inputs.begin(), args);
780 TFNode* node = builder_->SimdOp(opcode, inputs.begin());
781 if (result) result->node = node;
782 }
783
SimdLaneOp(FullDecoder * decoder,WasmOpcode opcode,const SimdLaneImmediate<validate> & imm,base::Vector<Value> inputs,Value * result)784 void SimdLaneOp(FullDecoder* decoder, WasmOpcode opcode,
785 const SimdLaneImmediate<validate>& imm,
786 base::Vector<Value> inputs, Value* result) {
787 NodeVector nodes(inputs.size());
788 GetNodes(nodes.begin(), inputs);
789 result->node = builder_->SimdLaneOp(opcode, imm.lane, nodes.begin());
790 }
791
Simd8x16ShuffleOp(FullDecoder * decoder,const Simd128Immediate<validate> & imm,const Value & input0,const Value & input1,Value * result)792 void Simd8x16ShuffleOp(FullDecoder* decoder,
793 const Simd128Immediate<validate>& imm,
794 const Value& input0, const Value& input1,
795 Value* result) {
796 TFNode* input_nodes[] = {input0.node, input1.node};
797 result->node = builder_->Simd8x16ShuffleOp(imm.value, input_nodes);
798 }
799
  // Graph-builder hook for `throw`: gathers the argument nodes, emits the
  // Throw node (routing a possible exception edge to an enclosing handler via
  // {CheckForException}), then terminates the current control flow.
  void Throw(FullDecoder* decoder, const TagIndexImmediate<validate>& imm,
             const base::Vector<Value>& value_args) {
    int count = value_args.length();
    ZoneVector<TFNode*> args(count, decoder->zone());
    for (int i = 0; i < count; ++i) {
      args[i] = value_args[i].node;
    }
    CheckForException(decoder,
                      builder_->Throw(imm.index, imm.tag, base::VectorOf(args),
                                      decoder->position()));
    TerminateThrow(decoder);
  }
812
  // Graph-builder hook for `rethrow`: re-throws the exception captured by the
  // enclosing catch block and terminates the current control flow.
  void Rethrow(FullDecoder* decoder, Control* block) {
    DCHECK(block->is_try_catchall() || block->is_try_catch());
    TFNode* exception = block->try_info->exception;
    DCHECK_NOT_NULL(exception);
    CheckForException(decoder, builder_->Rethrow(exception));
    TerminateThrow(decoder);
  }
820
  // Graph-builder hook for `catch`: compares the caught exception's tag
  // against {imm}'s tag, extracts the exception values on a match, and leaves
  // the no-match path as the catch environment for the next handler.
  void CatchException(FullDecoder* decoder,
                      const TagIndexImmediate<validate>& imm, Control* block,
                      base::Vector<Value> values) {
    DCHECK(block->is_try_catch());
    // The catch block is unreachable if no possible throws in the try block
    // exist. We only build a landing pad if some node in the try block can
    // (possibly) throw. Otherwise the catch environments remain empty.
    if (!block->try_info->might_throw()) {
      block->reachability = kSpecOnlyReachable;
      return;
    }

    TFNode* exception = block->try_info->exception;
    SetEnv(block->try_info->catch_env);

    TFNode* if_catch = nullptr;
    TFNode* if_no_catch = nullptr;

    // Get the exception tag and see if it matches the expected one.
    TFNode* caught_tag = builder_->GetExceptionTag(exception);
    TFNode* exception_tag = builder_->LoadTagFromTable(imm.index);
    TFNode* compare = builder_->ExceptionTagEqual(caught_tag, exception_tag);
    builder_->BranchNoHint(compare, &if_catch, &if_no_catch);

    // If the tags don't match we continue with the next tag by setting the
    // false environment as the new {TryInfo::catch_env} here.
    SsaEnv* if_no_catch_env = Split(decoder->zone(), ssa_env_);
    if_no_catch_env->control = if_no_catch;
    SsaEnv* if_catch_env = Steal(decoder->zone(), ssa_env_);
    if_catch_env->control = if_catch;
    block->try_info->catch_env = if_no_catch_env;

    // If the tags match we extract the values from the exception object and
    // push them onto the operand stack using the passed {values} vector.
    SetEnv(if_catch_env);
    NodeVector caught_values(values.size());
    base::Vector<TFNode*> caught_vector = base::VectorOf(caught_values);
    builder_->GetExceptionValues(exception, imm.tag, caught_vector);
    for (size_t i = 0, e = values.size(); i < e; ++i) {
      values[i].node = caught_values[i];
    }
  }
863
  // Graph-builder hook for `delegate`: forwards a possible exception of the
  // current try block to the handler at {depth} (or rethrows to the caller if
  // {depth} targets the function scope).
  void Delegate(FullDecoder* decoder, uint32_t depth, Control* block) {
    DCHECK_EQ(decoder->control_at(0), block);
    DCHECK(block->is_incomplete_try());

    if (block->try_info->might_throw()) {
      // Merge the current env into the target handler's env.
      SetEnv(block->try_info->catch_env);
      if (depth == decoder->control_depth() - 1) {
        // We just throw to the caller here, so no need to generate IfSuccess
        // and IfFailure nodes.
        builder_->Rethrow(block->try_info->exception);
        TerminateThrow(decoder);
        return;
      }
      DCHECK(decoder->control_at(depth)->is_try());
      TryInfo* target_try = decoder->control_at(depth)->try_info;
      // During loop unrolling, exit any loops between here and the target.
      if (emit_loop_exits()) {
        ValueVector stack_values;
        BuildNestedLoopExits(decoder, depth, true, stack_values,
                             &block->try_info->exception);
      }
      Goto(decoder, target_try->catch_env);

      // Create or merge the exception.
      if (target_try->catch_env->state == SsaEnv::kReached) {
        target_try->exception = block->try_info->exception;
      } else {
        DCHECK_EQ(target_try->catch_env->state, SsaEnv::kMerged);
        target_try->exception = builder_->CreateOrMergeIntoPhi(
            MachineRepresentation::kTagged, target_try->catch_env->control,
            target_try->exception, block->try_info->exception);
      }
    }
  }
898
  // Graph-builder hook for `catch_all`: switches to the try block's catch
  // environment, which handles any exception regardless of tag.
  void CatchAll(FullDecoder* decoder, Control* block) {
    DCHECK(block->is_try_catchall() || block->is_try_catch());
    DCHECK_EQ(decoder->control_at(0), block);

    // The catch block is unreachable if no possible throws in the try block
    // exist. We only build a landing pad if some node in the try block can
    // (possibly) throw. Otherwise the catch environments remain empty.
    if (!block->try_info->might_throw()) {
      decoder->SetSucceedingCodeDynamicallyUnreachable();
      return;
    }

    SetEnv(block->try_info->catch_env);
  }
913
AtomicOp(FullDecoder * decoder,WasmOpcode opcode,base::Vector<Value> args,const MemoryAccessImmediate<validate> & imm,Value * result)914 void AtomicOp(FullDecoder* decoder, WasmOpcode opcode,
915 base::Vector<Value> args,
916 const MemoryAccessImmediate<validate>& imm, Value* result) {
917 NodeVector inputs(args.size());
918 GetNodes(inputs.begin(), args);
919 TFNode* node = builder_->AtomicOp(opcode, inputs.begin(), imm.alignment,
920 imm.offset, decoder->position());
921 if (result) result->node = node;
922 }
923
AtomicFence(FullDecoder * decoder)924 void AtomicFence(FullDecoder* decoder) { builder_->AtomicFence(); }
925
  // Graph-builder hook for `memory.init`; delegates to the graph builder.
  void MemoryInit(FullDecoder* decoder,
                  const MemoryInitImmediate<validate>& imm, const Value& dst,
                  const Value& src, const Value& size) {
    builder_->MemoryInit(imm.data_segment.index, dst.node, src.node, size.node,
                         decoder->position());
  }
932
  // Graph-builder hook for `data.drop`; delegates to the graph builder.
  void DataDrop(FullDecoder* decoder, const IndexImmediate<validate>& imm) {
    builder_->DataDrop(imm.index, decoder->position());
  }
936
  // Graph-builder hook for `memory.copy`; delegates to the graph builder.
  void MemoryCopy(FullDecoder* decoder,
                  const MemoryCopyImmediate<validate>& imm, const Value& dst,
                  const Value& src, const Value& size) {
    builder_->MemoryCopy(dst.node, src.node, size.node, decoder->position());
  }
942
  // Graph-builder hook for `memory.fill`; delegates to the graph builder.
  void MemoryFill(FullDecoder* decoder,
                  const MemoryIndexImmediate<validate>& imm, const Value& dst,
                  const Value& value, const Value& size) {
    builder_->MemoryFill(dst.node, value.node, size.node, decoder->position());
  }
948
  // Graph-builder hook for `table.init`. {args} are dst, src, and size.
  void TableInit(FullDecoder* decoder, const TableInitImmediate<validate>& imm,
                 base::Vector<Value> args) {
    builder_->TableInit(imm.table.index, imm.element_segment.index,
                        args[0].node, args[1].node, args[2].node,
                        decoder->position());
  }
955
  // Graph-builder hook for `elem.drop`; delegates to the graph builder.
  void ElemDrop(FullDecoder* decoder, const IndexImmediate<validate>& imm) {
    builder_->ElemDrop(imm.index, decoder->position());
  }
959
  // Graph-builder hook for `table.copy`. {args} are dst, src, and size.
  void TableCopy(FullDecoder* decoder, const TableCopyImmediate<validate>& imm,
                 base::Vector<Value> args) {
    builder_->TableCopy(imm.table_dst.index, imm.table_src.index, args[0].node,
                        args[1].node, args[2].node, decoder->position());
  }
965
  // Graph-builder hook for `table.grow`; delegates to the graph builder.
  void TableGrow(FullDecoder* decoder, const IndexImmediate<validate>& imm,
                 const Value& value, const Value& delta, Value* result) {
    result->node = builder_->TableGrow(imm.index, value.node, delta.node);
  }
970
  // Graph-builder hook for `table.size`; delegates to the graph builder.
  void TableSize(FullDecoder* decoder, const IndexImmediate<validate>& imm,
                 Value* result) {
    result->node = builder_->TableSize(imm.index);
  }
975
  // Graph-builder hook for `table.fill`; delegates to the graph builder.
  void TableFill(FullDecoder* decoder, const IndexImmediate<validate>& imm,
                 const Value& start, const Value& value, const Value& count) {
    builder_->TableFill(imm.index, start.node, value.node, count.node);
  }
980
StructNewWithRtt(FullDecoder * decoder,const StructIndexImmediate<validate> & imm,const Value & rtt,const Value args[],Value * result)981 void StructNewWithRtt(FullDecoder* decoder,
982 const StructIndexImmediate<validate>& imm,
983 const Value& rtt, const Value args[], Value* result) {
984 uint32_t field_count = imm.struct_type->field_count();
985 NodeVector arg_nodes(field_count);
986 for (uint32_t i = 0; i < field_count; i++) {
987 arg_nodes[i] = args[i].node;
988 }
989 result->node = builder_->StructNewWithRtt(
990 imm.index, imm.struct_type, rtt.node, base::VectorOf(arg_nodes));
991 }
StructNewDefault(FullDecoder * decoder,const StructIndexImmediate<validate> & imm,const Value & rtt,Value * result)992 void StructNewDefault(FullDecoder* decoder,
993 const StructIndexImmediate<validate>& imm,
994 const Value& rtt, Value* result) {
995 uint32_t field_count = imm.struct_type->field_count();
996 NodeVector arg_nodes(field_count);
997 for (uint32_t i = 0; i < field_count; i++) {
998 arg_nodes[i] = DefaultValue(imm.struct_type->field(i));
999 }
1000 result->node = builder_->StructNewWithRtt(
1001 imm.index, imm.struct_type, rtt.node, base::VectorOf(arg_nodes));
1002 }
1003
  // Graph-builder hook for `struct.get[_s/_u]`, inserting a null check as
  // required by {struct_object.type}.
  void StructGet(FullDecoder* decoder, const Value& struct_object,
                 const FieldImmediate<validate>& field, bool is_signed,
                 Value* result) {
    result->node = builder_->StructGet(
        struct_object.node, field.struct_imm.struct_type, field.field_imm.index,
        NullCheckFor(struct_object.type), is_signed, decoder->position());
  }
1011
  // Graph-builder hook for `struct.set`, inserting a null check as required
  // by {struct_object.type}.
  void StructSet(FullDecoder* decoder, const Value& struct_object,
                 const FieldImmediate<validate>& field,
                 const Value& field_value) {
    builder_->StructSet(struct_object.node, field.struct_imm.struct_type,
                        field.field_imm.index, field_value.node,
                        NullCheckFor(struct_object.type), decoder->position());
  }
1019
  // Graph-builder hook for `array.new_with_rtt`: allocates an array of
  // {length} elements, each initialized to {initial_value}.
  void ArrayNewWithRtt(FullDecoder* decoder,
                       const ArrayIndexImmediate<validate>& imm,
                       const Value& length, const Value& initial_value,
                       const Value& rtt, Value* result) {
    result->node = builder_->ArrayNewWithRtt(imm.index, imm.array_type,
                                             length.node, initial_value.node,
                                             rtt.node, decoder->position());
    // array.new_with_rtt introduces a loop. Therefore, we have to mark the
    // immediately nesting loop (if any) as non-innermost.
    if (!loop_infos_.empty()) loop_infos_.back().is_innermost = false;
  }
1031
ArrayNewDefault(FullDecoder * decoder,const ArrayIndexImmediate<validate> & imm,const Value & length,const Value & rtt,Value * result)1032 void ArrayNewDefault(FullDecoder* decoder,
1033 const ArrayIndexImmediate<validate>& imm,
1034 const Value& length, const Value& rtt, Value* result) {
1035 // This will cause the default value to be chosen automatically based
1036 // on the element type.
1037 TFNode* initial_value = nullptr;
1038 result->node =
1039 builder_->ArrayNewWithRtt(imm.index, imm.array_type, length.node,
1040 initial_value, rtt.node, decoder->position());
1041 }
1042
  // Graph-builder hook for `array.get[_s/_u]`, inserting a null check as
  // required by {array_obj.type}.
  void ArrayGet(FullDecoder* decoder, const Value& array_obj,
                const ArrayIndexImmediate<validate>& imm, const Value& index,
                bool is_signed, Value* result) {
    result->node = builder_->ArrayGet(array_obj.node, imm.array_type,
                                      index.node, NullCheckFor(array_obj.type),
                                      is_signed, decoder->position());
  }
1050
  // Graph-builder hook for `array.set`, inserting a null check as required
  // by {array_obj.type}.
  void ArraySet(FullDecoder* decoder, const Value& array_obj,
                const ArrayIndexImmediate<validate>& imm, const Value& index,
                const Value& value) {
    builder_->ArraySet(array_obj.node, imm.array_type, index.node, value.node,
                       NullCheckFor(array_obj.type), decoder->position());
  }
1057
  // Graph-builder hook for `array.len`, inserting a null check as required
  // by {array_obj.type}.
  void ArrayLen(FullDecoder* decoder, const Value& array_obj, Value* result) {
    result->node = builder_->ArrayLen(
        array_obj.node, NullCheckFor(array_obj.type), decoder->position());
  }
1062
  // Graph-builder hook for `array.copy`, inserting null checks for both the
  // source and destination arrays.
  void ArrayCopy(FullDecoder* decoder, const Value& dst, const Value& dst_index,
                 const Value& src, const Value& src_index,
                 const Value& length) {
    builder_->ArrayCopy(dst.node, dst_index.node, NullCheckFor(dst.type),
                        src.node, src_index.node, NullCheckFor(src.type),
                        length.node, decoder->position());
  }
1070
ArrayInit(FullDecoder * decoder,const ArrayIndexImmediate<validate> & imm,const base::Vector<Value> & elements,const Value & rtt,Value * result)1071 void ArrayInit(FullDecoder* decoder, const ArrayIndexImmediate<validate>& imm,
1072 const base::Vector<Value>& elements, const Value& rtt,
1073 Value* result) {
1074 NodeVector element_nodes(elements.size());
1075 for (uint32_t i = 0; i < elements.size(); i++) {
1076 element_nodes[i] = elements[i].node;
1077 }
1078 result->node = builder_->ArrayInit(imm.index, imm.array_type, rtt.node,
1079 VectorOf(element_nodes));
1080 }
1081
  // Graph-builder hook for `i31.new`; delegates to the graph builder.
  void I31New(FullDecoder* decoder, const Value& input, Value* result) {
    result->node = builder_->I31New(input.node);
  }
1085
  // Graph-builder hook for `i31.get_s`; delegates to the graph builder.
  void I31GetS(FullDecoder* decoder, const Value& input, Value* result) {
    result->node = builder_->I31GetS(input.node);
  }
1089
  // Graph-builder hook for `i31.get_u`; delegates to the graph builder.
  void I31GetU(FullDecoder* decoder, const Value& input, Value* result) {
    result->node = builder_->I31GetU(input.node);
  }
1093
  // Graph-builder hook for `rtt.canon`; delegates to the graph builder.
  void RttCanon(FullDecoder* decoder, uint32_t type_index, Value* result) {
    result->node = builder_->RttCanon(type_index);
  }
1097
  // Graph-builder hook for `rtt.sub`; delegates to the graph builder.
  void RttSub(FullDecoder* decoder, uint32_t type_index, const Value& parent,
              Value* result, WasmRttSubMode mode) {
    result->node = builder_->RttSub(type_index, parent.node, mode);
  }
1102
  using StaticKnowledge = compiler::WasmGraphBuilder::ObjectReferenceKnowledge;

  // Gathers compile-time facts about {object_type} and {rtt_type} that allow
  // the builder to emit cheaper type checks.
  StaticKnowledge ComputeStaticKnowledge(ValueType object_type,
                                         ValueType rtt_type,
                                         const WasmModule* module) {
    StaticKnowledge result;
    result.object_can_be_null = object_type.is_nullable();
    DCHECK(object_type.is_object_reference());  // Checked by validation.
    // In the bottom case, the result is irrelevant.
    result.reference_kind =
        rtt_type != kWasmBottom && module->has_signature(rtt_type.ref_index())
            ? compiler::WasmGraphBuilder::kFunction
            : compiler::WasmGraphBuilder::kArrayOrStruct;
    // -1 signals an RTT whose depth is not statically known.
    result.rtt_depth = rtt_type.has_depth() ? rtt_type.depth() : -1;
    return result;
  }
1119
  // Graph-builder hook for `ref.test`: emits a runtime type check informed by
  // statically known facts about the operand types.
  void RefTest(FullDecoder* decoder, const Value& object, const Value& rtt,
               Value* result) {
    StaticKnowledge config =
        ComputeStaticKnowledge(object.type, rtt.type, decoder->module_);
    result->node = builder_->RefTest(object.node, rtt.node, config);
  }
1126
  // Graph-builder hook for `ref.cast`: like {RefTest}, but emits a cast that
  // traps at {decoder->position()} on failure.
  void RefCast(FullDecoder* decoder, const Value& object, const Value& rtt,
               Value* result) {
    StaticKnowledge config =
        ComputeStaticKnowledge(object.type, rtt.type, decoder->module_);
    result->node =
        builder_->RefCast(object.node, rtt.node, config, decoder->position());
  }
1134
  // Shared implementation for all br_on_cast-style instructions.
  // {branch_function} is the builder member that emits the actual type check;
  // {branch_on_match} selects whether the matching or the non-matching path
  // takes the branch to {br_depth}.
  template <void (compiler::WasmGraphBuilder::*branch_function)(
      TFNode*, TFNode*, StaticKnowledge, TFNode**, TFNode**, TFNode**,
      TFNode**)>
  void BrOnCastAbs(FullDecoder* decoder, const Value& object, const Value& rtt,
                   Value* forwarding_value, uint32_t br_depth,
                   bool branch_on_match) {
    StaticKnowledge config =
        ComputeStaticKnowledge(object.type, rtt.type, decoder->module_);
    SsaEnv* branch_env = Split(decoder->zone(), ssa_env_);
    SsaEnv* no_branch_env = Steal(decoder->zone(), ssa_env_);
    no_branch_env->SetNotMerged();
    // Map match/no-match to branch/fall-through depending on the variant.
    SsaEnv* match_env = branch_on_match ? branch_env : no_branch_env;
    SsaEnv* no_match_env = branch_on_match ? no_branch_env : branch_env;
    (builder_->*branch_function)(object.node, rtt.node, config,
                                 &match_env->control, &match_env->effect,
                                 &no_match_env->control, &no_match_env->effect);
    builder_->SetControl(no_branch_env->control);
    SetEnv(branch_env);
    // The branch path forwards the checked object.
    forwarding_value->node = object.node;
    // Currently, br_on_* instructions modify the value stack before calling
    // the interface function, so we don't need to drop any values here.
    BrOrRet(decoder, br_depth, 0);
    SetEnv(no_branch_env);
  }
1159
  // Graph-builder hook for `br_on_cast`: branches if the cast succeeds.
  void BrOnCast(FullDecoder* decoder, const Value& object, const Value& rtt,
                Value* value_on_branch, uint32_t br_depth) {
    BrOnCastAbs<&compiler::WasmGraphBuilder::BrOnCast>(
        decoder, object, rtt, value_on_branch, br_depth, true);
  }
1165
  // Graph-builder hook for `br_on_cast_fail`: reuses the same builder check
  // as {BrOnCast}, but branches on the non-matching path.
  void BrOnCastFail(FullDecoder* decoder, const Value& object, const Value& rtt,
                    Value* value_on_fallthrough, uint32_t br_depth) {
    BrOnCastAbs<&compiler::WasmGraphBuilder::BrOnCast>(
        decoder, object, rtt, value_on_fallthrough, br_depth, false);
  }
1171
  // Graph-builder hook for `ref.is_data`; delegates to the graph builder.
  void RefIsData(FullDecoder* decoder, const Value& object, Value* result) {
    result->node = builder_->RefIsData(object.node, object.type.is_nullable());
  }
1175
  // Graph-builder hook for `ref.as_data`; delegates to the graph builder.
  void RefAsData(FullDecoder* decoder, const Value& object, Value* result) {
    result->node = builder_->RefAsData(object.node, object.type.is_nullable(),
                                       decoder->position());
  }
1180
  // Graph-builder hook for `br_on_data`. No RTT is involved, so a bottom
  // value is passed in its place.
  void BrOnData(FullDecoder* decoder, const Value& object,
                Value* value_on_branch, uint32_t br_depth) {
    BrOnCastAbs<&compiler::WasmGraphBuilder::BrOnData>(
        decoder, object, Value{nullptr, kWasmBottom}, value_on_branch, br_depth,
        true);
  }
1187
  // Graph-builder hook for `br_on_non_data` (branch on the non-matching path).
  void BrOnNonData(FullDecoder* decoder, const Value& object,
                   Value* value_on_fallthrough, uint32_t br_depth) {
    BrOnCastAbs<&compiler::WasmGraphBuilder::BrOnData>(
        decoder, object, Value{nullptr, kWasmBottom}, value_on_fallthrough,
        br_depth, false);
  }
1194
  // Graph-builder hook for `ref.is_func`; delegates to the graph builder.
  void RefIsFunc(FullDecoder* decoder, const Value& object, Value* result) {
    result->node = builder_->RefIsFunc(object.node, object.type.is_nullable());
  }
1198
  // Graph-builder hook for `ref.as_func`; delegates to the graph builder.
  void RefAsFunc(FullDecoder* decoder, const Value& object, Value* result) {
    result->node = builder_->RefAsFunc(object.node, object.type.is_nullable(),
                                       decoder->position());
  }
1203
  // Graph-builder hook for `br_on_func`; no RTT involved, bottom is passed.
  void BrOnFunc(FullDecoder* decoder, const Value& object,
                Value* value_on_branch, uint32_t br_depth) {
    BrOnCastAbs<&compiler::WasmGraphBuilder::BrOnFunc>(
        decoder, object, Value{nullptr, kWasmBottom}, value_on_branch, br_depth,
        true);
  }
1210
  // Graph-builder hook for `br_on_non_func` (branch on the non-matching path).
  void BrOnNonFunc(FullDecoder* decoder, const Value& object,
                   Value* value_on_fallthrough, uint32_t br_depth) {
    BrOnCastAbs<&compiler::WasmGraphBuilder::BrOnFunc>(
        decoder, object, Value{nullptr, kWasmBottom}, value_on_fallthrough,
        br_depth, false);
  }
1217
  // Graph-builder hook for `ref.is_i31`; delegates to the graph builder.
  void RefIsI31(FullDecoder* decoder, const Value& object, Value* result) {
    result->node = builder_->RefIsI31(object.node);
  }
1221
  // Graph-builder hook for `ref.as_i31`; delegates to the graph builder.
  void RefAsI31(FullDecoder* decoder, const Value& object, Value* result) {
    result->node = builder_->RefAsI31(object.node, decoder->position());
  }
1225
  // Graph-builder hook for `br_on_i31`; no RTT involved, bottom is passed.
  void BrOnI31(FullDecoder* decoder, const Value& object,
               Value* value_on_branch, uint32_t br_depth) {
    BrOnCastAbs<&compiler::WasmGraphBuilder::BrOnI31>(
        decoder, object, Value{nullptr, kWasmBottom}, value_on_branch, br_depth,
        true);
  }
1232
  // Graph-builder hook for `br_on_non_i31` (branch on the non-matching path).
  void BrOnNonI31(FullDecoder* decoder, const Value& object,
                  Value* value_on_fallthrough, uint32_t br_depth) {
    BrOnCastAbs<&compiler::WasmGraphBuilder::BrOnI31>(
        decoder, object, Value{nullptr, kWasmBottom}, value_on_fallthrough,
        br_depth, false);
  }
1239
  // Copies the underlying graph node from {from} to {to}.
  void Forward(FullDecoder* decoder, const Value& from, Value* to) {
    to->node = from.node;
  }
1243
loop_infos()1244 std::vector<compiler::WasmLoopInfo> loop_infos() { return loop_infos_; }
1245
 private:
  // Current SSA environment; switched via {SetEnv}.
  SsaEnv* ssa_env_ = nullptr;
  // Emits the TurboFan graph nodes for this function.
  compiler::WasmGraphBuilder* builder_;
  // Index of the function being compiled.
  int func_index_;
  // Branch hints for this function, if any (may stay null).
  const BranchHintMap* branch_hints_ = nullptr;
  // Tracks loop data for loop unrolling.
  std::vector<compiler::WasmLoopInfo> loop_infos_;
  // Distinguishes regular from inlined compilation; see {emit_loop_exits}.
  InlinedStatus inlined_status_;
1254
effect()1255 TFNode* effect() { return builder_->effect(); }
1256
control()1257 TFNode* control() { return builder_->control(); }
1258
  // Returns the {TryInfo} of the innermost try block that currently catches.
  TryInfo* current_try_info(FullDecoder* decoder) {
    DCHECK_LT(decoder->current_catch(), decoder->control_depth());
    return decoder->control_at(decoder->control_depth_of_current_catch())
        ->try_info;
  }
1264
1265 // Loop exits are only used during loop unrolling and are then removed, as
1266 // they cannot be handled by later optimization stages. Since unrolling comes
1267 // before inlining in the compilation pipeline, we should not emit loop exits
1268 // in inlined functions. Also, we should not do so when unrolling is disabled.
emit_loop_exits()1269 bool emit_loop_exits() {
1270 return FLAG_wasm_loop_unrolling && inlined_status_ == kRegularFunction;
1271 }
1272
GetNodes(TFNode ** nodes,Value * values,size_t count)1273 void GetNodes(TFNode** nodes, Value* values, size_t count) {
1274 for (size_t i = 0; i < count; ++i) {
1275 nodes[i] = values[i].node;
1276 }
1277 }
1278
  // Vector-based convenience overload of {GetNodes}.
  void GetNodes(TFNode** nodes, base::Vector<Value> values) {
    GetNodes(nodes, values.begin(), values.size());
  }
1282
  // Makes {env} the current SSA environment: saves the builder's current
  // effect/control back into the old environment, then installs {env}'s
  // effect, control and instance cache in the builder.
  // NOTE(review): {env} is dereferenced unconditionally at the end, so callers
  // must not pass null; the null checks above only guard the tracing output.
  void SetEnv(SsaEnv* env) {
    if (FLAG_trace_wasm_decoder) {
      char state = 'X';
      if (env) {
        switch (env->state) {
          case SsaEnv::kReached:
            state = 'R';
            break;
          case SsaEnv::kUnreachable:
            state = 'U';
            break;
          case SsaEnv::kMerged:
            state = 'M';
            break;
        }
      }
      PrintF("{set_env = %p, state = %c", env, state);
      if (env && env->control) {
        PrintF(", control = ");
        compiler::WasmGraphBuilder::PrintDebugName(env->control);
      }
      PrintF("}\n");
    }
    // Write the builder's current dependencies back to the outgoing env.
    if (ssa_env_) {
      ssa_env_->control = control();
      ssa_env_->effect = effect();
    }
    ssa_env_ = env;
    builder_->SetEffectControl(env->effect, env->control);
    builder_->set_instance_cache(&env->instance_cache);
  }
1314
CheckForException(FullDecoder * decoder,TFNode * node)1315 V8_INLINE TFNode* CheckForException(FullDecoder* decoder, TFNode* node) {
1316 if (node == nullptr) return nullptr;
1317
1318 const bool inside_try_scope = decoder->current_catch() != -1;
1319 if (!inside_try_scope) return node;
1320
1321 return CheckForExceptionImpl(decoder, node);
1322 }
1323
CheckForExceptionImpl(FullDecoder * decoder,TFNode * node)1324 V8_NOINLINE TFNode* CheckForExceptionImpl(FullDecoder* decoder,
1325 TFNode* node) {
1326 TFNode* if_success = nullptr;
1327 TFNode* if_exception = nullptr;
1328 if (!builder_->ThrowsException(node, &if_success, &if_exception)) {
1329 return node;
1330 }
1331
1332 SsaEnv* success_env = Steal(decoder->zone(), ssa_env_);
1333 success_env->control = if_success;
1334
1335 SsaEnv* exception_env = Split(decoder->zone(), success_env);
1336 exception_env->control = if_exception;
1337 exception_env->effect = if_exception;
1338 SetEnv(exception_env);
1339 TryInfo* try_info = current_try_info(decoder);
1340 if (emit_loop_exits()) {
1341 ValueVector values;
1342 BuildNestedLoopExits(decoder, decoder->control_depth_of_current_catch(),
1343 true, values, &if_exception);
1344 }
1345 Goto(decoder, try_info->catch_env);
1346 if (try_info->exception == nullptr) {
1347 DCHECK_EQ(SsaEnv::kReached, try_info->catch_env->state);
1348 try_info->exception = if_exception;
1349 } else {
1350 DCHECK_EQ(SsaEnv::kMerged, try_info->catch_env->state);
1351 try_info->exception = builder_->CreateOrMergeIntoPhi(
1352 MachineRepresentation::kTaggedPointer, try_info->catch_env->control,
1353 try_info->exception, if_exception);
1354 }
1355
1356 SetEnv(success_env);
1357 return node;
1358 }
1359
DefaultValue(ValueType type)1360 TFNode* DefaultValue(ValueType type) {
1361 DCHECK(type.is_defaultable());
1362 switch (type.kind()) {
1363 case kI8:
1364 case kI16:
1365 case kI32:
1366 return builder_->Int32Constant(0);
1367 case kI64:
1368 return builder_->Int64Constant(0);
1369 case kF32:
1370 return builder_->Float32Constant(0);
1371 case kF64:
1372 return builder_->Float64Constant(0);
1373 case kS128:
1374 return builder_->S128Zero();
1375 case kOptRef:
1376 return builder_->RefNull();
1377 case kRtt:
1378 case kRttWithDepth:
1379 case kVoid:
1380 case kBottom:
1381 case kRef:
1382 UNREACHABLE();
1383 }
1384 }
1385
  // Merges {values} into the merge point of control block {c}: on the first
  // arrival the nodes are forwarded directly, on later arrivals they are
  // merged into phis at the target's control node.
  void MergeValuesInto(FullDecoder* decoder, Control* c, Merge<Value>* merge,
                       Value* values) {
    DCHECK(merge == &c->start_merge || merge == &c->end_merge);

    SsaEnv* target = c->merge_env;
    // This has to be computed before calling Goto().
    const bool first = target->state == SsaEnv::kUnreachable;

    Goto(decoder, target);

    if (merge->arity == 0) return;

    for (uint32_t i = 0; i < merge->arity; ++i) {
      Value& val = values[i];
      Value& old = (*merge)[i];
      DCHECK_NOT_NULL(val.node);
      DCHECK(val.type == kWasmBottom || val.type.machine_representation() ==
                                            old.type.machine_representation());
      old.node = first ? val.node
                       : builder_->CreateOrMergeIntoPhi(
                             old.type.machine_representation(), target->control,
                             old.node, val.node);
    }
  }
1410
  // Convenience overload: merges the top {merge->arity} stack values (after
  // skipping {drop_values} entries) into block {c}'s merge point.
  void MergeValuesInto(FullDecoder* decoder, Control* c, Merge<Value>* merge,
                       uint32_t drop_values = 0) {
#ifdef DEBUG
    uint32_t avail = decoder->stack_size() -
                     decoder->control_at(0)->stack_depth - drop_values;
    DCHECK_GE(avail, merge->arity);
#endif
    Value* stack_values = merge->arity > 0
                              ? decoder->stack_value(merge->arity + drop_values)
                              : nullptr;
    MergeValuesInto(decoder, c, merge, stack_values);
  }
1423
  // Transfers the current control, effect, locals, and instance cache into
  // the target environment {to}, merging with whatever already reached it.
  // Behavior depends on how many predecessors have reached {to} so far:
  // none (kUnreachable: overwrite), one (kReached: build a 2-way merge and
  // phis where values differ), or several (kMerged: extend existing merge
  // and phis with one more input).
  void Goto(FullDecoder* decoder, SsaEnv* to) {
    DCHECK_NOT_NULL(to);
    switch (to->state) {
      case SsaEnv::kUnreachable: {  // Overwrite destination.
        to->state = SsaEnv::kReached;
        // There might be an offset in the locals due to a 'let'.
        DCHECK_EQ(ssa_env_->locals.size(), decoder->num_locals());
        DCHECK_GE(ssa_env_->locals.size(), to->locals.size());
        uint32_t local_count_diff =
            static_cast<uint32_t>(ssa_env_->locals.size() - to->locals.size());
        // Copy all locals, then drop the leading ones not visible in {to}.
        to->locals = ssa_env_->locals;
        to->locals.erase(to->locals.begin(),
                         to->locals.begin() + local_count_diff);
        to->control = control();
        to->effect = effect();
        to->instance_cache = ssa_env_->instance_cache;
        break;
      }
      case SsaEnv::kReached: {  // Create a new merge.
        to->state = SsaEnv::kMerged;
        // Merge control.
        TFNode* controls[] = {to->control, control()};
        TFNode* merge = builder_->Merge(2, controls);
        to->control = merge;
        // Merge effects. Only create an effect phi if the effects differ.
        TFNode* old_effect = effect();
        if (old_effect != to->effect) {
          TFNode* inputs[] = {to->effect, old_effect, merge};
          to->effect = builder_->EffectPhi(2, inputs);
        }
        // Merge locals.
        // There might be an offset in the locals due to a 'let'.
        DCHECK_EQ(ssa_env_->locals.size(), decoder->num_locals());
        DCHECK_GE(ssa_env_->locals.size(), to->locals.size());
        uint32_t local_count_diff =
            static_cast<uint32_t>(ssa_env_->locals.size() - to->locals.size());
        for (uint32_t i = 0; i < to->locals.size(); i++) {
          TFNode* a = to->locals[i];
          TFNode* b = ssa_env_->locals[i + local_count_diff];
          // Only introduce a phi where the two incoming values differ.
          if (a != b) {
            TFNode* inputs[] = {a, b, merge};
            to->locals[i] = builder_->Phi(
                decoder->local_type(i + local_count_diff), 2, inputs);
          }
        }
        // Start a new merge from the instance cache.
        builder_->NewInstanceCacheMerge(&to->instance_cache,
                                        &ssa_env_->instance_cache, merge);
        break;
      }
      case SsaEnv::kMerged: {
        TFNode* merge = to->control;
        // Extend the existing merge control node.
        builder_->AppendToMerge(merge, control());
        // Merge effects.
        to->effect =
            builder_->CreateOrMergeIntoEffectPhi(merge, to->effect, effect());
        // Merge locals.
        // There might be an offset in the locals due to a 'let'.
        DCHECK_EQ(ssa_env_->locals.size(), decoder->num_locals());
        DCHECK_GE(ssa_env_->locals.size(), to->locals.size());
        uint32_t local_count_diff =
            static_cast<uint32_t>(ssa_env_->locals.size() - to->locals.size());
        for (uint32_t i = 0; i < to->locals.size(); i++) {
          to->locals[i] = builder_->CreateOrMergeIntoPhi(
              decoder->local_type(i + local_count_diff)
                  .machine_representation(),
              merge, to->locals[i], ssa_env_->locals[i + local_count_diff]);
        }
        // Merge the instance caches.
        builder_->MergeInstanceCacheInto(&to->instance_cache,
                                         &ssa_env_->instance_cache, merge);
        break;
      }
      default:
        UNREACHABLE();
    }
  }
1502
1503 // Create a complete copy of {from}.
Split(Zone * zone,SsaEnv * from)1504 SsaEnv* Split(Zone* zone, SsaEnv* from) {
1505 DCHECK_NOT_NULL(from);
1506 if (from == ssa_env_) {
1507 ssa_env_->control = control();
1508 ssa_env_->effect = effect();
1509 }
1510 SsaEnv* result = zone->New<SsaEnv>(*from);
1511 result->state = SsaEnv::kReached;
1512 return result;
1513 }
1514
1515 // Create a copy of {from} that steals its state and leaves {from}
1516 // unreachable.
Steal(Zone * zone,SsaEnv * from)1517 SsaEnv* Steal(Zone* zone, SsaEnv* from) {
1518 DCHECK_NOT_NULL(from);
1519 if (from == ssa_env_) {
1520 ssa_env_->control = control();
1521 ssa_env_->effect = effect();
1522 }
1523 SsaEnv* result = zone->New<SsaEnv>(std::move(*from));
1524 // Restore the length of {from->locals} after applying move-constructor.
1525 from->locals.resize(result->locals.size());
1526 result->state = SsaEnv::kReached;
1527 return result;
1528 }
1529
1530 class CallInfo {
1531 public:
1532 enum CallMode { kCallDirect, kCallIndirect, kCallRef };
1533
CallDirect(uint32_t callee_index)1534 static CallInfo CallDirect(uint32_t callee_index) {
1535 return {kCallDirect, callee_index, nullptr, 0,
1536 CheckForNull::kWithoutNullCheck};
1537 }
1538
CallIndirect(const Value & index_value,uint32_t table_index,uint32_t sig_index)1539 static CallInfo CallIndirect(const Value& index_value, uint32_t table_index,
1540 uint32_t sig_index) {
1541 return {kCallIndirect, sig_index, &index_value, table_index,
1542 CheckForNull::kWithoutNullCheck};
1543 }
1544
CallRef(const Value & funcref_value,CheckForNull null_check)1545 static CallInfo CallRef(const Value& funcref_value,
1546 CheckForNull null_check) {
1547 return {kCallRef, 0, &funcref_value, 0, null_check};
1548 }
1549
call_mode()1550 CallMode call_mode() { return call_mode_; }
1551
sig_index()1552 uint32_t sig_index() {
1553 DCHECK_EQ(call_mode_, kCallIndirect);
1554 return callee_or_sig_index_;
1555 }
1556
callee_index()1557 uint32_t callee_index() {
1558 DCHECK_EQ(call_mode_, kCallDirect);
1559 return callee_or_sig_index_;
1560 }
1561
null_check()1562 CheckForNull null_check() {
1563 DCHECK_EQ(call_mode_, kCallRef);
1564 return null_check_;
1565 }
1566
index_or_callee_value()1567 const Value* index_or_callee_value() {
1568 DCHECK_NE(call_mode_, kCallDirect);
1569 return index_or_callee_value_;
1570 }
1571
table_index()1572 uint32_t table_index() {
1573 DCHECK_EQ(call_mode_, kCallIndirect);
1574 return table_index_;
1575 }
1576
1577 private:
CallInfo(CallMode call_mode,uint32_t callee_or_sig_index,const Value * index_or_callee_value,uint32_t table_index,CheckForNull null_check)1578 CallInfo(CallMode call_mode, uint32_t callee_or_sig_index,
1579 const Value* index_or_callee_value, uint32_t table_index,
1580 CheckForNull null_check)
1581 : call_mode_(call_mode),
1582 callee_or_sig_index_(callee_or_sig_index),
1583 index_or_callee_value_(index_or_callee_value),
1584 table_index_(table_index),
1585 null_check_(null_check) {}
1586 CallMode call_mode_;
1587 uint32_t callee_or_sig_index_;
1588 const Value* index_or_callee_value_;
1589 uint32_t table_index_;
1590 CheckForNull null_check_;
1591 };
1592
DoCall(FullDecoder * decoder,CallInfo call_info,const FunctionSig * sig,const Value args[],Value returns[])1593 void DoCall(FullDecoder* decoder, CallInfo call_info, const FunctionSig* sig,
1594 const Value args[], Value returns[]) {
1595 size_t param_count = sig->parameter_count();
1596 size_t return_count = sig->return_count();
1597 NodeVector arg_nodes(param_count + 1);
1598 base::SmallVector<TFNode*, 1> return_nodes(return_count);
1599 arg_nodes[0] = (call_info.call_mode() == CallInfo::kCallDirect)
1600 ? nullptr
1601 : call_info.index_or_callee_value()->node;
1602
1603 for (size_t i = 0; i < param_count; ++i) {
1604 arg_nodes[i + 1] = args[i].node;
1605 }
1606 switch (call_info.call_mode()) {
1607 case CallInfo::kCallIndirect:
1608 CheckForException(
1609 decoder, builder_->CallIndirect(
1610 call_info.table_index(), call_info.sig_index(),
1611 base::VectorOf(arg_nodes),
1612 base::VectorOf(return_nodes), decoder->position()));
1613 break;
1614 case CallInfo::kCallDirect:
1615 CheckForException(
1616 decoder, builder_->CallDirect(
1617 call_info.callee_index(), base::VectorOf(arg_nodes),
1618 base::VectorOf(return_nodes), decoder->position()));
1619 break;
1620 case CallInfo::kCallRef:
1621 CheckForException(
1622 decoder,
1623 builder_->CallRef(sig, base::VectorOf(arg_nodes),
1624 base::VectorOf(return_nodes),
1625 call_info.null_check(), decoder->position()));
1626 break;
1627 }
1628 for (size_t i = 0; i < return_count; ++i) {
1629 returns[i].node = return_nodes[i];
1630 }
1631 // The invoked function could have used grow_memory, so we need to
1632 // reload mem_size and mem_start.
1633 LoadContextIntoSsa(ssa_env_);
1634 }
1635
DoReturnCall(FullDecoder * decoder,CallInfo call_info,const FunctionSig * sig,const Value args[])1636 void DoReturnCall(FullDecoder* decoder, CallInfo call_info,
1637 const FunctionSig* sig, const Value args[]) {
1638 size_t arg_count = sig->parameter_count();
1639
1640 ValueVector arg_values(arg_count + 1);
1641 if (call_info.call_mode() == CallInfo::kCallDirect) {
1642 arg_values[0].node = nullptr;
1643 } else {
1644 arg_values[0] = *call_info.index_or_callee_value();
1645 // This is not done by copy assignment.
1646 arg_values[0].node = call_info.index_or_callee_value()->node;
1647 }
1648 if (arg_count > 0) {
1649 std::memcpy(arg_values.data() + 1, args, arg_count * sizeof(Value));
1650 }
1651
1652 if (emit_loop_exits()) {
1653 BuildNestedLoopExits(decoder, decoder->control_depth(), false,
1654 arg_values);
1655 }
1656
1657 NodeVector arg_nodes(arg_count + 1);
1658 GetNodes(arg_nodes.data(), base::VectorOf(arg_values));
1659
1660 switch (call_info.call_mode()) {
1661 case CallInfo::kCallIndirect:
1662 CheckForException(decoder,
1663 builder_->ReturnCallIndirect(
1664 call_info.table_index(), call_info.sig_index(),
1665 base::VectorOf(arg_nodes), decoder->position()));
1666 break;
1667 case CallInfo::kCallDirect:
1668 CheckForException(decoder,
1669 builder_->ReturnCall(call_info.callee_index(),
1670 base::VectorOf(arg_nodes),
1671 decoder->position()));
1672 break;
1673 case CallInfo::kCallRef:
1674 CheckForException(
1675 decoder, builder_->ReturnCallRef(sig, base::VectorOf(arg_nodes),
1676 call_info.null_check(),
1677 decoder->position()));
1678 break;
1679 }
1680 }
1681
  // Emits a LoopExit node for {loop} and syncs the builder's cached
  // control/effect nodes back into the current SSA environment.
  void BuildLoopExits(FullDecoder* decoder, Control* loop) {
    builder_->LoopExit(loop->loop_node);
    ssa_env_->control = control();
    ssa_env_->effect = effect();
  }
1687
WrapLocalsAtLoopExit(FullDecoder * decoder,Control * loop)1688 void WrapLocalsAtLoopExit(FullDecoder* decoder, Control* loop) {
1689 for (uint32_t index = 0; index < decoder->num_locals(); index++) {
1690 if (loop->loop_assignments->Contains(static_cast<int>(index))) {
1691 ssa_env_->locals[index] = builder_->LoopExitValue(
1692 ssa_env_->locals[index],
1693 decoder->local_type(index).machine_representation());
1694 }
1695 }
1696 if (loop->loop_assignments->Contains(decoder->num_locals())) {
1697 #define WRAP_CACHE_FIELD(field) \
1698 if (ssa_env_->instance_cache.field != nullptr) { \
1699 ssa_env_->instance_cache.field = builder_->LoopExitValue( \
1700 ssa_env_->instance_cache.field, MachineType::PointerRepresentation()); \
1701 }
1702
1703 WRAP_CACHE_FIELD(mem_start);
1704 WRAP_CACHE_FIELD(mem_size);
1705 #undef WRAP_CACHE_FIELD
1706 }
1707 }
1708
BuildNestedLoopExits(FullDecoder * decoder,uint32_t depth_limit,bool wrap_exit_values,ValueVector & stack_values,TFNode ** exception_value=nullptr)1709 void BuildNestedLoopExits(FullDecoder* decoder, uint32_t depth_limit,
1710 bool wrap_exit_values, ValueVector& stack_values,
1711 TFNode** exception_value = nullptr) {
1712 DCHECK(emit_loop_exits());
1713 Control* control = nullptr;
1714 // We are only interested in exits from the innermost loop.
1715 for (uint32_t i = 0; i < depth_limit; i++) {
1716 Control* c = decoder->control_at(i);
1717 if (c->is_loop()) {
1718 control = c;
1719 break;
1720 }
1721 }
1722 if (control != nullptr) {
1723 BuildLoopExits(decoder, control);
1724 for (Value& value : stack_values) {
1725 if (value.node != nullptr) {
1726 value.node = builder_->LoopExitValue(
1727 value.node, value.type.machine_representation());
1728 }
1729 }
1730 if (exception_value != nullptr) {
1731 *exception_value = builder_->LoopExitValue(
1732 *exception_value, MachineRepresentation::kWord32);
1733 }
1734 if (wrap_exit_values) {
1735 WrapLocalsAtLoopExit(decoder, control);
1736 }
1737 }
1738 }
1739
TerminateThrow(FullDecoder * decoder)1740 void TerminateThrow(FullDecoder* decoder) {
1741 if (emit_loop_exits()) {
1742 SsaEnv* internal_env = ssa_env_;
1743 SsaEnv* exit_env = Split(decoder->zone(), ssa_env_);
1744 SetEnv(exit_env);
1745 ValueVector stack_values;
1746 BuildNestedLoopExits(decoder, decoder->control_depth(), false,
1747 stack_values);
1748 builder_->TerminateThrow(effect(), control());
1749 SetEnv(internal_env);
1750 } else {
1751 builder_->TerminateThrow(effect(), control());
1752 }
1753 }
1754
NullCheckFor(ValueType type)1755 CheckForNull NullCheckFor(ValueType type) {
1756 DCHECK(type.is_object_reference());
1757 return type.is_nullable() ? CheckForNull::kWithNullCheck
1758 : CheckForNull::kWithoutNullCheck;
1759 }
1760 };
1761
1762 } // namespace
1763
BuildTFGraph(AccountingAllocator * allocator,const WasmFeatures & enabled,const WasmModule * module,compiler::WasmGraphBuilder * builder,WasmFeatures * detected,const FunctionBody & body,std::vector<compiler::WasmLoopInfo> * loop_infos,compiler::NodeOriginTable * node_origins,int func_index,InlinedStatus inlined_status)1764 DecodeResult BuildTFGraph(AccountingAllocator* allocator,
1765 const WasmFeatures& enabled, const WasmModule* module,
1766 compiler::WasmGraphBuilder* builder,
1767 WasmFeatures* detected, const FunctionBody& body,
1768 std::vector<compiler::WasmLoopInfo>* loop_infos,
1769 compiler::NodeOriginTable* node_origins,
1770 int func_index, InlinedStatus inlined_status) {
1771 Zone zone(allocator, ZONE_NAME);
1772 WasmFullDecoder<Decoder::kFullValidation, WasmGraphBuildingInterface> decoder(
1773 &zone, module, enabled, detected, body, builder, func_index,
1774 inlined_status);
1775 if (node_origins) {
1776 builder->AddBytecodePositionDecorator(node_origins, &decoder);
1777 }
1778 decoder.Decode();
1779 if (node_origins) {
1780 builder->RemoveBytecodePositionDecorator();
1781 }
1782 if (FLAG_wasm_loop_unrolling && inlined_status == kRegularFunction) {
1783 *loop_infos = decoder.interface().loop_infos();
1784 }
1785 return decoder.toResult(nullptr);
1786 }
1787
1788 } // namespace wasm
1789 } // namespace internal
1790 } // namespace v8
1791