1 // Copyright 2016 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #include "src/code-stub-assembler.h"
6 
7 #include "src/code-factory.h"
8 #include "src/frames-inl.h"
9 #include "src/frames.h"
10 #include "src/objects/api-callbacks.h"
11 #include "src/objects/ordered-hash-table-inl.h"
12 
13 namespace v8 {
14 namespace internal {
15 
16 using compiler::Node;
17 template <class T>
18 using TNode = compiler::TNode<T>;
19 
CodeStubAssembler(compiler::CodeAssemblerState * state)20 CodeStubAssembler::CodeStubAssembler(compiler::CodeAssemblerState* state)
21     : compiler::CodeAssembler(state) {
22   if (DEBUG_BOOL && FLAG_csa_trap_on_node != nullptr) {
23     HandleBreakOnNode();
24   }
25 }
26 
HandleBreakOnNode()27 void CodeStubAssembler::HandleBreakOnNode() {
28   // FLAG_csa_trap_on_node should be in a form "STUB,NODE" where STUB is a
29   // string specifying the name of a stub and NODE is number specifying node id.
30   const char* name = state()->name();
31   size_t name_length = strlen(name);
32   if (strncmp(FLAG_csa_trap_on_node, name, name_length) != 0) {
33     // Different name.
34     return;
35   }
36   size_t option_length = strlen(FLAG_csa_trap_on_node);
37   if (option_length < name_length + 2 ||
38       FLAG_csa_trap_on_node[name_length] != ',') {
39     // Option is too short.
40     return;
41   }
42   const char* start = &FLAG_csa_trap_on_node[name_length + 1];
43   char* end;
44   int node_id = static_cast<int>(strtol(start, &end, 10));
45   if (start == end) {
46     // Bad node id.
47     return;
48   }
49   BreakOnNode(node_id);
50 }
51 
Assert(const BranchGenerator & branch,const char * message,const char * file,int line,Node * extra_node1,const char * extra_node1_name,Node * extra_node2,const char * extra_node2_name,Node * extra_node3,const char * extra_node3_name,Node * extra_node4,const char * extra_node4_name,Node * extra_node5,const char * extra_node5_name)52 void CodeStubAssembler::Assert(const BranchGenerator& branch,
53                                const char* message, const char* file, int line,
54                                Node* extra_node1, const char* extra_node1_name,
55                                Node* extra_node2, const char* extra_node2_name,
56                                Node* extra_node3, const char* extra_node3_name,
57                                Node* extra_node4, const char* extra_node4_name,
58                                Node* extra_node5,
59                                const char* extra_node5_name) {
60 #if defined(DEBUG)
61   if (FLAG_debug_code) {
62     Check(branch, message, file, line, extra_node1, extra_node1_name,
63           extra_node2, extra_node2_name, extra_node3, extra_node3_name,
64           extra_node4, extra_node4_name, extra_node5, extra_node5_name);
65   }
66 #endif
67 }
68 
Assert(const NodeGenerator & condition_body,const char * message,const char * file,int line,Node * extra_node1,const char * extra_node1_name,Node * extra_node2,const char * extra_node2_name,Node * extra_node3,const char * extra_node3_name,Node * extra_node4,const char * extra_node4_name,Node * extra_node5,const char * extra_node5_name)69 void CodeStubAssembler::Assert(const NodeGenerator& condition_body,
70                                const char* message, const char* file, int line,
71                                Node* extra_node1, const char* extra_node1_name,
72                                Node* extra_node2, const char* extra_node2_name,
73                                Node* extra_node3, const char* extra_node3_name,
74                                Node* extra_node4, const char* extra_node4_name,
75                                Node* extra_node5,
76                                const char* extra_node5_name) {
77 #if defined(DEBUG)
78   if (FLAG_debug_code) {
79     Check(condition_body, message, file, line, extra_node1, extra_node1_name,
80           extra_node2, extra_node2_name, extra_node3, extra_node3_name,
81           extra_node4, extra_node4_name, extra_node5, extra_node5_name);
82   }
83 #endif
84 }
85 
86 #ifdef DEBUG
87 namespace {
MaybePrintNodeWithName(CodeStubAssembler * csa,Node * node,const char * node_name)88 void MaybePrintNodeWithName(CodeStubAssembler* csa, Node* node,
89                             const char* node_name) {
90   if (node != nullptr) {
91     csa->CallRuntime(Runtime::kPrintWithNameForAssert, csa->SmiConstant(0),
92                      csa->StringConstant(node_name), node);
93   }
94 }
95 }  // namespace
96 #endif
97 
Check(const BranchGenerator & branch,const char * message,const char * file,int line,Node * extra_node1,const char * extra_node1_name,Node * extra_node2,const char * extra_node2_name,Node * extra_node3,const char * extra_node3_name,Node * extra_node4,const char * extra_node4_name,Node * extra_node5,const char * extra_node5_name)98 void CodeStubAssembler::Check(const BranchGenerator& branch,
99                               const char* message, const char* file, int line,
100                               Node* extra_node1, const char* extra_node1_name,
101                               Node* extra_node2, const char* extra_node2_name,
102                               Node* extra_node3, const char* extra_node3_name,
103                               Node* extra_node4, const char* extra_node4_name,
104                               Node* extra_node5, const char* extra_node5_name) {
105   Label ok(this);
106   Label not_ok(this, Label::kDeferred);
107   if (message != nullptr && FLAG_code_comments) {
108     Comment("[ Assert: %s", message);
109   } else {
110     Comment("[ Assert");
111   }
112   branch(&ok, &not_ok);
113 
114   BIND(&not_ok);
115   DCHECK_NOT_NULL(message);
116   char chars[1024];
117   Vector<char> buffer(chars);
118   if (file != nullptr) {
119     SNPrintF(buffer, "CSA_ASSERT failed: %s [%s:%d]\n", message, file, line);
120   } else {
121     SNPrintF(buffer, "CSA_ASSERT failed: %s\n", message);
122   }
123   Node* message_node = StringConstant(&(buffer[0]));
124 
125 #ifdef DEBUG
126   // Only print the extra nodes in debug builds.
127   MaybePrintNodeWithName(this, extra_node1, extra_node1_name);
128   MaybePrintNodeWithName(this, extra_node2, extra_node2_name);
129   MaybePrintNodeWithName(this, extra_node3, extra_node3_name);
130   MaybePrintNodeWithName(this, extra_node4, extra_node4_name);
131   MaybePrintNodeWithName(this, extra_node5, extra_node5_name);
132 #endif
133 
134   DebugAbort(message_node);
135   Unreachable();
136 
137   BIND(&ok);
138   Comment("] Assert");
139 }
140 
Check(const NodeGenerator & condition_body,const char * message,const char * file,int line,Node * extra_node1,const char * extra_node1_name,Node * extra_node2,const char * extra_node2_name,Node * extra_node3,const char * extra_node3_name,Node * extra_node4,const char * extra_node4_name,Node * extra_node5,const char * extra_node5_name)141 void CodeStubAssembler::Check(const NodeGenerator& condition_body,
142                               const char* message, const char* file, int line,
143                               Node* extra_node1, const char* extra_node1_name,
144                               Node* extra_node2, const char* extra_node2_name,
145                               Node* extra_node3, const char* extra_node3_name,
146                               Node* extra_node4, const char* extra_node4_name,
147                               Node* extra_node5, const char* extra_node5_name) {
148   BranchGenerator branch = [=](Label* ok, Label* not_ok) {
149     Node* condition = condition_body();
150     DCHECK_NOT_NULL(condition);
151     Branch(condition, ok, not_ok);
152   };
153 
154   Check(branch, message, file, line, extra_node1, extra_node1_name, extra_node2,
155         extra_node2_name, extra_node3, extra_node3_name, extra_node4,
156         extra_node4_name, extra_node5, extra_node5_name);
157 }
158 
SelectImpl(TNode<BoolT> condition,const NodeGenerator & true_body,const NodeGenerator & false_body,MachineRepresentation rep)159 Node* CodeStubAssembler::SelectImpl(TNode<BoolT> condition,
160                                     const NodeGenerator& true_body,
161                                     const NodeGenerator& false_body,
162                                     MachineRepresentation rep) {
163   VARIABLE(value, rep);
164   Label vtrue(this), vfalse(this), end(this);
165   Branch(condition, &vtrue, &vfalse);
166 
167   BIND(&vtrue);
168   {
169     value.Bind(true_body());
170     Goto(&end);
171   }
172   BIND(&vfalse);
173   {
174     value.Bind(false_body());
175     Goto(&end);
176   }
177 
178   BIND(&end);
179   return value.value();
180 }
181 
SelectInt32Constant(SloppyTNode<BoolT> condition,int true_value,int false_value)182 TNode<Int32T> CodeStubAssembler::SelectInt32Constant(
183     SloppyTNode<BoolT> condition, int true_value, int false_value) {
184   return SelectConstant<Int32T>(condition, Int32Constant(true_value),
185                                 Int32Constant(false_value));
186 }
187 
SelectIntPtrConstant(SloppyTNode<BoolT> condition,int true_value,int false_value)188 TNode<IntPtrT> CodeStubAssembler::SelectIntPtrConstant(
189     SloppyTNode<BoolT> condition, int true_value, int false_value) {
190   return SelectConstant<IntPtrT>(condition, IntPtrConstant(true_value),
191                                  IntPtrConstant(false_value));
192 }
193 
SelectBooleanConstant(SloppyTNode<BoolT> condition)194 TNode<Oddball> CodeStubAssembler::SelectBooleanConstant(
195     SloppyTNode<BoolT> condition) {
196   return SelectConstant<Oddball>(condition, TrueConstant(), FalseConstant());
197 }
198 
SelectSmiConstant(SloppyTNode<BoolT> condition,Smi * true_value,Smi * false_value)199 TNode<Smi> CodeStubAssembler::SelectSmiConstant(SloppyTNode<BoolT> condition,
200                                                 Smi* true_value,
201                                                 Smi* false_value) {
202   return SelectConstant<Smi>(condition, SmiConstant(true_value),
203                              SmiConstant(false_value));
204 }
205 
NoContextConstant()206 Node* CodeStubAssembler::NoContextConstant() {
207   return SmiConstant(Context::kNoContext);
208 }
209 
210 #define HEAP_CONSTANT_ACCESSOR(rootIndexName, rootAccessorName, name) \
211   compiler::TNode<std::remove_reference<decltype(                     \
212       *std::declval<Heap>().rootAccessorName())>::type>               \
213       CodeStubAssembler::name##Constant() {                           \
214     return UncheckedCast<std::remove_reference<decltype(              \
215         *std::declval<Heap>().rootAccessorName())>::type>(            \
216         LoadRoot(Heap::k##rootIndexName##RootIndex));                 \
217   }
218 HEAP_CONSTANT_LIST(HEAP_CONSTANT_ACCESSOR);
219 #undef HEAP_CONSTANT_ACCESSOR
220 
221 #define HEAP_CONSTANT_TEST(rootIndexName, rootAccessorName, name) \
222   compiler::TNode<BoolT> CodeStubAssembler::Is##name(             \
223       SloppyTNode<Object> value) {                                \
224     return WordEqual(value, name##Constant());                    \
225   }                                                               \
226   compiler::TNode<BoolT> CodeStubAssembler::IsNot##name(          \
227       SloppyTNode<Object> value) {                                \
228     return WordNotEqual(value, name##Constant());                 \
229   }
230 HEAP_CONSTANT_LIST(HEAP_CONSTANT_TEST);
231 #undef HEAP_CONSTANT_TEST
232 
IntPtrOrSmiConstant(int value,ParameterMode mode)233 Node* CodeStubAssembler::IntPtrOrSmiConstant(int value, ParameterMode mode) {
234   if (mode == SMI_PARAMETERS) {
235     return SmiConstant(value);
236   } else {
237     DCHECK_EQ(INTPTR_PARAMETERS, mode);
238     return IntPtrConstant(value);
239   }
240 }
241 
IsIntPtrOrSmiConstantZero(Node * test,ParameterMode mode)242 bool CodeStubAssembler::IsIntPtrOrSmiConstantZero(Node* test,
243                                                   ParameterMode mode) {
244   int32_t constant_test;
245   Smi* smi_test;
246   if (mode == INTPTR_PARAMETERS) {
247     if (ToInt32Constant(test, constant_test) && constant_test == 0) {
248       return true;
249     }
250   } else {
251     DCHECK_EQ(mode, SMI_PARAMETERS);
252     if (ToSmiConstant(test, smi_test) && smi_test->value() == 0) {
253       return true;
254     }
255   }
256   return false;
257 }
258 
TryGetIntPtrOrSmiConstantValue(Node * maybe_constant,int * value,ParameterMode mode)259 bool CodeStubAssembler::TryGetIntPtrOrSmiConstantValue(Node* maybe_constant,
260                                                        int* value,
261                                                        ParameterMode mode) {
262   int32_t int32_constant;
263   if (mode == INTPTR_PARAMETERS) {
264     if (ToInt32Constant(maybe_constant, int32_constant)) {
265       *value = int32_constant;
266       return true;
267     }
268   } else {
269     DCHECK_EQ(mode, SMI_PARAMETERS);
270     Smi* smi_constant;
271     if (ToSmiConstant(maybe_constant, smi_constant)) {
272       *value = Smi::ToInt(smi_constant);
273       return true;
274     }
275   }
276   return false;
277 }
278 
IntPtrRoundUpToPowerOfTwo32(TNode<IntPtrT> value)279 TNode<IntPtrT> CodeStubAssembler::IntPtrRoundUpToPowerOfTwo32(
280     TNode<IntPtrT> value) {
281   Comment("IntPtrRoundUpToPowerOfTwo32");
282   CSA_ASSERT(this, UintPtrLessThanOrEqual(value, IntPtrConstant(0x80000000u)));
283   value = Signed(IntPtrSub(value, IntPtrConstant(1)));
284   for (int i = 1; i <= 16; i *= 2) {
285     value = Signed(WordOr(value, WordShr(value, IntPtrConstant(i))));
286   }
287   return Signed(IntPtrAdd(value, IntPtrConstant(1)));
288 }
289 
MatchesParameterMode(Node * value,ParameterMode mode)290 Node* CodeStubAssembler::MatchesParameterMode(Node* value, ParameterMode mode) {
291   if (mode == SMI_PARAMETERS) {
292     return TaggedIsSmi(value);
293   } else {
294     return Int32Constant(1);
295   }
296 }
297 
WordIsPowerOfTwo(SloppyTNode<IntPtrT> value)298 TNode<BoolT> CodeStubAssembler::WordIsPowerOfTwo(SloppyTNode<IntPtrT> value) {
299   // value && !(value & (value - 1))
300   return WordEqual(
301       Select<IntPtrT>(
302           WordEqual(value, IntPtrConstant(0)),
303           [=] { return IntPtrConstant(1); },
304           [=] { return WordAnd(value, IntPtrSub(value, IntPtrConstant(1))); }),
305       IntPtrConstant(0));
306 }
307 
Float64Round(SloppyTNode<Float64T> x)308 TNode<Float64T> CodeStubAssembler::Float64Round(SloppyTNode<Float64T> x) {
309   Node* one = Float64Constant(1.0);
310   Node* one_half = Float64Constant(0.5);
311 
312   Label return_x(this);
313 
314   // Round up {x} towards Infinity.
315   VARIABLE(var_x, MachineRepresentation::kFloat64, Float64Ceil(x));
316 
317   GotoIf(Float64LessThanOrEqual(Float64Sub(var_x.value(), one_half), x),
318          &return_x);
319   var_x.Bind(Float64Sub(var_x.value(), one));
320   Goto(&return_x);
321 
322   BIND(&return_x);
323   return TNode<Float64T>::UncheckedCast(var_x.value());
324 }
325 
Float64Ceil(SloppyTNode<Float64T> x)326 TNode<Float64T> CodeStubAssembler::Float64Ceil(SloppyTNode<Float64T> x) {
327   if (IsFloat64RoundUpSupported()) {
328     return Float64RoundUp(x);
329   }
330 
331   Node* one = Float64Constant(1.0);
332   Node* zero = Float64Constant(0.0);
333   Node* two_52 = Float64Constant(4503599627370496.0E0);
334   Node* minus_two_52 = Float64Constant(-4503599627370496.0E0);
335 
336   VARIABLE(var_x, MachineRepresentation::kFloat64, x);
337   Label return_x(this), return_minus_x(this);
338 
339   // Check if {x} is greater than zero.
340   Label if_xgreaterthanzero(this), if_xnotgreaterthanzero(this);
341   Branch(Float64GreaterThan(x, zero), &if_xgreaterthanzero,
342          &if_xnotgreaterthanzero);
343 
344   BIND(&if_xgreaterthanzero);
345   {
346     // Just return {x} unless it's in the range ]0,2^52[.
347     GotoIf(Float64GreaterThanOrEqual(x, two_52), &return_x);
348 
349     // Round positive {x} towards Infinity.
350     var_x.Bind(Float64Sub(Float64Add(two_52, x), two_52));
351     GotoIfNot(Float64LessThan(var_x.value(), x), &return_x);
352     var_x.Bind(Float64Add(var_x.value(), one));
353     Goto(&return_x);
354   }
355 
356   BIND(&if_xnotgreaterthanzero);
357   {
358     // Just return {x} unless it's in the range ]-2^52,0[
359     GotoIf(Float64LessThanOrEqual(x, minus_two_52), &return_x);
360     GotoIfNot(Float64LessThan(x, zero), &return_x);
361 
362     // Round negated {x} towards Infinity and return the result negated.
363     Node* minus_x = Float64Neg(x);
364     var_x.Bind(Float64Sub(Float64Add(two_52, minus_x), two_52));
365     GotoIfNot(Float64GreaterThan(var_x.value(), minus_x), &return_minus_x);
366     var_x.Bind(Float64Sub(var_x.value(), one));
367     Goto(&return_minus_x);
368   }
369 
370   BIND(&return_minus_x);
371   var_x.Bind(Float64Neg(var_x.value()));
372   Goto(&return_x);
373 
374   BIND(&return_x);
375   return TNode<Float64T>::UncheckedCast(var_x.value());
376 }
377 
Float64Floor(SloppyTNode<Float64T> x)378 TNode<Float64T> CodeStubAssembler::Float64Floor(SloppyTNode<Float64T> x) {
379   if (IsFloat64RoundDownSupported()) {
380     return Float64RoundDown(x);
381   }
382 
383   Node* one = Float64Constant(1.0);
384   Node* zero = Float64Constant(0.0);
385   Node* two_52 = Float64Constant(4503599627370496.0E0);
386   Node* minus_two_52 = Float64Constant(-4503599627370496.0E0);
387 
388   VARIABLE(var_x, MachineRepresentation::kFloat64, x);
389   Label return_x(this), return_minus_x(this);
390 
391   // Check if {x} is greater than zero.
392   Label if_xgreaterthanzero(this), if_xnotgreaterthanzero(this);
393   Branch(Float64GreaterThan(x, zero), &if_xgreaterthanzero,
394          &if_xnotgreaterthanzero);
395 
396   BIND(&if_xgreaterthanzero);
397   {
398     // Just return {x} unless it's in the range ]0,2^52[.
399     GotoIf(Float64GreaterThanOrEqual(x, two_52), &return_x);
400 
401     // Round positive {x} towards -Infinity.
402     var_x.Bind(Float64Sub(Float64Add(two_52, x), two_52));
403     GotoIfNot(Float64GreaterThan(var_x.value(), x), &return_x);
404     var_x.Bind(Float64Sub(var_x.value(), one));
405     Goto(&return_x);
406   }
407 
408   BIND(&if_xnotgreaterthanzero);
409   {
410     // Just return {x} unless it's in the range ]-2^52,0[
411     GotoIf(Float64LessThanOrEqual(x, minus_two_52), &return_x);
412     GotoIfNot(Float64LessThan(x, zero), &return_x);
413 
414     // Round negated {x} towards -Infinity and return the result negated.
415     Node* minus_x = Float64Neg(x);
416     var_x.Bind(Float64Sub(Float64Add(two_52, minus_x), two_52));
417     GotoIfNot(Float64LessThan(var_x.value(), minus_x), &return_minus_x);
418     var_x.Bind(Float64Add(var_x.value(), one));
419     Goto(&return_minus_x);
420   }
421 
422   BIND(&return_minus_x);
423   var_x.Bind(Float64Neg(var_x.value()));
424   Goto(&return_x);
425 
426   BIND(&return_x);
427   return TNode<Float64T>::UncheckedCast(var_x.value());
428 }
429 
Float64RoundToEven(SloppyTNode<Float64T> x)430 TNode<Float64T> CodeStubAssembler::Float64RoundToEven(SloppyTNode<Float64T> x) {
431   if (IsFloat64RoundTiesEvenSupported()) {
432     return Float64RoundTiesEven(x);
433   }
434   // See ES#sec-touint8clamp for details.
435   Node* f = Float64Floor(x);
436   Node* f_and_half = Float64Add(f, Float64Constant(0.5));
437 
438   VARIABLE(var_result, MachineRepresentation::kFloat64);
439   Label return_f(this), return_f_plus_one(this), done(this);
440 
441   GotoIf(Float64LessThan(f_and_half, x), &return_f_plus_one);
442   GotoIf(Float64LessThan(x, f_and_half), &return_f);
443   {
444     Node* f_mod_2 = Float64Mod(f, Float64Constant(2.0));
445     Branch(Float64Equal(f_mod_2, Float64Constant(0.0)), &return_f,
446            &return_f_plus_one);
447   }
448 
449   BIND(&return_f);
450   var_result.Bind(f);
451   Goto(&done);
452 
453   BIND(&return_f_plus_one);
454   var_result.Bind(Float64Add(f, Float64Constant(1.0)));
455   Goto(&done);
456 
457   BIND(&done);
458   return TNode<Float64T>::UncheckedCast(var_result.value());
459 }
460 
Float64Trunc(SloppyTNode<Float64T> x)461 TNode<Float64T> CodeStubAssembler::Float64Trunc(SloppyTNode<Float64T> x) {
462   if (IsFloat64RoundTruncateSupported()) {
463     return Float64RoundTruncate(x);
464   }
465 
466   Node* one = Float64Constant(1.0);
467   Node* zero = Float64Constant(0.0);
468   Node* two_52 = Float64Constant(4503599627370496.0E0);
469   Node* minus_two_52 = Float64Constant(-4503599627370496.0E0);
470 
471   VARIABLE(var_x, MachineRepresentation::kFloat64, x);
472   Label return_x(this), return_minus_x(this);
473 
474   // Check if {x} is greater than 0.
475   Label if_xgreaterthanzero(this), if_xnotgreaterthanzero(this);
476   Branch(Float64GreaterThan(x, zero), &if_xgreaterthanzero,
477          &if_xnotgreaterthanzero);
478 
479   BIND(&if_xgreaterthanzero);
480   {
481     if (IsFloat64RoundDownSupported()) {
482       var_x.Bind(Float64RoundDown(x));
483     } else {
484       // Just return {x} unless it's in the range ]0,2^52[.
485       GotoIf(Float64GreaterThanOrEqual(x, two_52), &return_x);
486 
487       // Round positive {x} towards -Infinity.
488       var_x.Bind(Float64Sub(Float64Add(two_52, x), two_52));
489       GotoIfNot(Float64GreaterThan(var_x.value(), x), &return_x);
490       var_x.Bind(Float64Sub(var_x.value(), one));
491     }
492     Goto(&return_x);
493   }
494 
495   BIND(&if_xnotgreaterthanzero);
496   {
497     if (IsFloat64RoundUpSupported()) {
498       var_x.Bind(Float64RoundUp(x));
499       Goto(&return_x);
500     } else {
501       // Just return {x} unless its in the range ]-2^52,0[.
502       GotoIf(Float64LessThanOrEqual(x, minus_two_52), &return_x);
503       GotoIfNot(Float64LessThan(x, zero), &return_x);
504 
505       // Round negated {x} towards -Infinity and return result negated.
506       Node* minus_x = Float64Neg(x);
507       var_x.Bind(Float64Sub(Float64Add(two_52, minus_x), two_52));
508       GotoIfNot(Float64GreaterThan(var_x.value(), minus_x), &return_minus_x);
509       var_x.Bind(Float64Sub(var_x.value(), one));
510       Goto(&return_minus_x);
511     }
512   }
513 
514   BIND(&return_minus_x);
515   var_x.Bind(Float64Neg(var_x.value()));
516   Goto(&return_x);
517 
518   BIND(&return_x);
519   return TNode<Float64T>::UncheckedCast(var_x.value());
520 }
521 
SmiShiftBitsConstant()522 Node* CodeStubAssembler::SmiShiftBitsConstant() {
523   return IntPtrConstant(kSmiShiftSize + kSmiTagSize);
524 }
525 
SmiFromInt32(SloppyTNode<Int32T> value)526 TNode<Smi> CodeStubAssembler::SmiFromInt32(SloppyTNode<Int32T> value) {
527   TNode<IntPtrT> value_intptr = ChangeInt32ToIntPtr(value);
528   return BitcastWordToTaggedSigned(
529       WordShl(value_intptr, SmiShiftBitsConstant()));
530 }
531 
IsValidPositiveSmi(TNode<IntPtrT> value)532 TNode<BoolT> CodeStubAssembler::IsValidPositiveSmi(TNode<IntPtrT> value) {
533   intptr_t constant_value;
534   if (ToIntPtrConstant(value, constant_value)) {
535     return (static_cast<uintptr_t>(constant_value) <=
536             static_cast<uintptr_t>(Smi::kMaxValue))
537                ? Int32TrueConstant()
538                : Int32FalseConstant();
539   }
540 
541   return UintPtrLessThanOrEqual(value, IntPtrConstant(Smi::kMaxValue));
542 }
543 
SmiTag(SloppyTNode<IntPtrT> value)544 TNode<Smi> CodeStubAssembler::SmiTag(SloppyTNode<IntPtrT> value) {
545   int32_t constant_value;
546   if (ToInt32Constant(value, constant_value) && Smi::IsValid(constant_value)) {
547     return SmiConstant(constant_value);
548   }
549   return BitcastWordToTaggedSigned(WordShl(value, SmiShiftBitsConstant()));
550 }
551 
SmiUntag(SloppyTNode<Smi> value)552 TNode<IntPtrT> CodeStubAssembler::SmiUntag(SloppyTNode<Smi> value) {
553   intptr_t constant_value;
554   if (ToIntPtrConstant(value, constant_value)) {
555     return IntPtrConstant(constant_value >> (kSmiShiftSize + kSmiTagSize));
556   }
557   return UncheckedCast<IntPtrT>(
558       WordSar(BitcastTaggedToWord(value), SmiShiftBitsConstant()));
559 }
560 
SmiToInt32(SloppyTNode<Smi> value)561 TNode<Int32T> CodeStubAssembler::SmiToInt32(SloppyTNode<Smi> value) {
562   TNode<IntPtrT> result = SmiUntag(value);
563   return TruncateIntPtrToInt32(result);
564 }
565 
SmiToFloat64(SloppyTNode<Smi> value)566 TNode<Float64T> CodeStubAssembler::SmiToFloat64(SloppyTNode<Smi> value) {
567   return ChangeInt32ToFloat64(SmiToInt32(value));
568 }
569 
SmiMax(TNode<Smi> a,TNode<Smi> b)570 TNode<Smi> CodeStubAssembler::SmiMax(TNode<Smi> a, TNode<Smi> b) {
571   return SelectConstant<Smi>(SmiLessThan(a, b), b, a);
572 }
573 
SmiMin(TNode<Smi> a,TNode<Smi> b)574 TNode<Smi> CodeStubAssembler::SmiMin(TNode<Smi> a, TNode<Smi> b) {
575   return SelectConstant<Smi>(SmiLessThan(a, b), a, b);
576 }
577 
TrySmiAdd(TNode<Smi> lhs,TNode<Smi> rhs,Label * if_overflow)578 TNode<Smi> CodeStubAssembler::TrySmiAdd(TNode<Smi> lhs, TNode<Smi> rhs,
579                                         Label* if_overflow) {
580   TNode<PairT<IntPtrT, BoolT>> pair =
581       IntPtrAddWithOverflow(BitcastTaggedToWord(lhs), BitcastTaggedToWord(rhs));
582   TNode<BoolT> overflow = Projection<1>(pair);
583   GotoIf(overflow, if_overflow);
584   TNode<IntPtrT> result = Projection<0>(pair);
585   return BitcastWordToTaggedSigned(result);
586 }
587 
TrySmiSub(TNode<Smi> lhs,TNode<Smi> rhs,Label * if_overflow)588 TNode<Smi> CodeStubAssembler::TrySmiSub(TNode<Smi> lhs, TNode<Smi> rhs,
589                                         Label* if_overflow) {
590   TNode<PairT<IntPtrT, BoolT>> pair =
591       IntPtrSubWithOverflow(BitcastTaggedToWord(lhs), BitcastTaggedToWord(rhs));
592   TNode<BoolT> overflow = Projection<1>(pair);
593   GotoIf(overflow, if_overflow);
594   TNode<IntPtrT> result = Projection<0>(pair);
595   return BitcastWordToTaggedSigned(result);
596 }
597 
NumberMax(SloppyTNode<Object> a,SloppyTNode<Object> b)598 TNode<Object> CodeStubAssembler::NumberMax(SloppyTNode<Object> a,
599                                            SloppyTNode<Object> b) {
600   // TODO(danno): This could be optimized by specifically handling smi cases.
601   VARIABLE(result, MachineRepresentation::kTagged);
602   Label done(this), greater_than_equal_a(this), greater_than_equal_b(this);
603   GotoIfNumberGreaterThanOrEqual(a, b, &greater_than_equal_a);
604   GotoIfNumberGreaterThanOrEqual(b, a, &greater_than_equal_b);
605   result.Bind(NanConstant());
606   Goto(&done);
607   BIND(&greater_than_equal_a);
608   result.Bind(a);
609   Goto(&done);
610   BIND(&greater_than_equal_b);
611   result.Bind(b);
612   Goto(&done);
613   BIND(&done);
614   return TNode<Object>::UncheckedCast(result.value());
615 }
616 
NumberMin(SloppyTNode<Object> a,SloppyTNode<Object> b)617 TNode<Object> CodeStubAssembler::NumberMin(SloppyTNode<Object> a,
618                                            SloppyTNode<Object> b) {
619   // TODO(danno): This could be optimized by specifically handling smi cases.
620   VARIABLE(result, MachineRepresentation::kTagged);
621   Label done(this), greater_than_equal_a(this), greater_than_equal_b(this);
622   GotoIfNumberGreaterThanOrEqual(a, b, &greater_than_equal_a);
623   GotoIfNumberGreaterThanOrEqual(b, a, &greater_than_equal_b);
624   result.Bind(NanConstant());
625   Goto(&done);
626   BIND(&greater_than_equal_a);
627   result.Bind(b);
628   Goto(&done);
629   BIND(&greater_than_equal_b);
630   result.Bind(a);
631   Goto(&done);
632   BIND(&done);
633   return TNode<Object>::UncheckedCast(result.value());
634 }
635 
ConvertToRelativeIndex(TNode<Context> context,TNode<Object> index,TNode<IntPtrT> length)636 TNode<IntPtrT> CodeStubAssembler::ConvertToRelativeIndex(
637     TNode<Context> context, TNode<Object> index, TNode<IntPtrT> length) {
638   TVARIABLE(IntPtrT, result);
639 
640   TNode<Number> const index_int =
641       ToInteger_Inline(context, index, CodeStubAssembler::kTruncateMinusZero);
642   TNode<IntPtrT> zero = IntPtrConstant(0);
643 
644   Label done(this);
645   Label if_issmi(this), if_isheapnumber(this, Label::kDeferred);
646   Branch(TaggedIsSmi(index_int), &if_issmi, &if_isheapnumber);
647 
648   BIND(&if_issmi);
649   {
650     TNode<Smi> const index_smi = CAST(index_int);
651     result = Select<IntPtrT>(
652         IntPtrLessThan(SmiUntag(index_smi), zero),
653         [=] { return IntPtrMax(IntPtrAdd(length, SmiUntag(index_smi)), zero); },
654         [=] { return IntPtrMin(SmiUntag(index_smi), length); });
655     Goto(&done);
656   }
657 
658   BIND(&if_isheapnumber);
659   {
660     // If {index} is a heap number, it is definitely out of bounds. If it is
661     // negative, {index} = max({length} + {index}),0) = 0'. If it is positive,
662     // set {index} to {length}.
663     TNode<HeapNumber> const index_hn = CAST(index_int);
664     TNode<Float64T> const float_zero = Float64Constant(0.);
665     TNode<Float64T> const index_float = LoadHeapNumberValue(index_hn);
666     result = SelectConstant<IntPtrT>(Float64LessThan(index_float, float_zero),
667                                      zero, length);
668     Goto(&done);
669   }
670   BIND(&done);
671   return result.value();
672 }
673 
SmiMod(TNode<Smi> a,TNode<Smi> b)674 TNode<Number> CodeStubAssembler::SmiMod(TNode<Smi> a, TNode<Smi> b) {
675   TVARIABLE(Number, var_result);
676   Label return_result(this, &var_result),
677       return_minuszero(this, Label::kDeferred),
678       return_nan(this, Label::kDeferred);
679 
680   // Untag {a} and {b}.
681   TNode<Int32T> int_a = SmiToInt32(a);
682   TNode<Int32T> int_b = SmiToInt32(b);
683 
684   // Return NaN if {b} is zero.
685   GotoIf(Word32Equal(int_b, Int32Constant(0)), &return_nan);
686 
687   // Check if {a} is non-negative.
688   Label if_aisnotnegative(this), if_aisnegative(this, Label::kDeferred);
689   Branch(Int32LessThanOrEqual(Int32Constant(0), int_a), &if_aisnotnegative,
690          &if_aisnegative);
691 
692   BIND(&if_aisnotnegative);
693   {
694     // Fast case, don't need to check any other edge cases.
695     TNode<Int32T> r = Int32Mod(int_a, int_b);
696     var_result = SmiFromInt32(r);
697     Goto(&return_result);
698   }
699 
700   BIND(&if_aisnegative);
701   {
702     if (SmiValuesAre32Bits()) {
703       // Check if {a} is kMinInt and {b} is -1 (only relevant if the
704       // kMinInt is actually representable as a Smi).
705       Label join(this);
706       GotoIfNot(Word32Equal(int_a, Int32Constant(kMinInt)), &join);
707       GotoIf(Word32Equal(int_b, Int32Constant(-1)), &return_minuszero);
708       Goto(&join);
709       BIND(&join);
710     }
711 
712     // Perform the integer modulus operation.
713     TNode<Int32T> r = Int32Mod(int_a, int_b);
714 
715     // Check if {r} is zero, and if so return -0, because we have to
716     // take the sign of the left hand side {a}, which is negative.
717     GotoIf(Word32Equal(r, Int32Constant(0)), &return_minuszero);
718 
719     // The remainder {r} can be outside the valid Smi range on 32bit
720     // architectures, so we cannot just say SmiFromInt32(r) here.
721     var_result = ChangeInt32ToTagged(r);
722     Goto(&return_result);
723   }
724 
725   BIND(&return_minuszero);
726   var_result = MinusZeroConstant();
727   Goto(&return_result);
728 
729   BIND(&return_nan);
730   var_result = NanConstant();
731   Goto(&return_result);
732 
733   BIND(&return_result);
734   return var_result.value();
735 }
736 
SmiMul(TNode<Smi> a,TNode<Smi> b)737 TNode<Number> CodeStubAssembler::SmiMul(TNode<Smi> a, TNode<Smi> b) {
738   TVARIABLE(Number, var_result);
739   VARIABLE(var_lhs_float64, MachineRepresentation::kFloat64);
740   VARIABLE(var_rhs_float64, MachineRepresentation::kFloat64);
741   Label return_result(this, &var_result);
742 
743   // Both {a} and {b} are Smis. Convert them to integers and multiply.
744   Node* lhs32 = SmiToInt32(a);
745   Node* rhs32 = SmiToInt32(b);
746   Node* pair = Int32MulWithOverflow(lhs32, rhs32);
747 
748   Node* overflow = Projection(1, pair);
749 
750   // Check if the multiplication overflowed.
751   Label if_overflow(this, Label::kDeferred), if_notoverflow(this);
752   Branch(overflow, &if_overflow, &if_notoverflow);
753   BIND(&if_notoverflow);
754   {
755     // If the answer is zero, we may need to return -0.0, depending on the
756     // input.
757     Label answer_zero(this), answer_not_zero(this);
758     Node* answer = Projection(0, pair);
759     Node* zero = Int32Constant(0);
760     Branch(Word32Equal(answer, zero), &answer_zero, &answer_not_zero);
761     BIND(&answer_not_zero);
762     {
763       var_result = ChangeInt32ToTagged(answer);
764       Goto(&return_result);
765     }
766     BIND(&answer_zero);
767     {
768       Node* or_result = Word32Or(lhs32, rhs32);
769       Label if_should_be_negative_zero(this), if_should_be_zero(this);
770       Branch(Int32LessThan(or_result, zero), &if_should_be_negative_zero,
771              &if_should_be_zero);
772       BIND(&if_should_be_negative_zero);
773       {
774         var_result = MinusZeroConstant();
775         Goto(&return_result);
776       }
777       BIND(&if_should_be_zero);
778       {
779         var_result = SmiConstant(0);
780         Goto(&return_result);
781       }
782     }
783   }
784   BIND(&if_overflow);
785   {
786     var_lhs_float64.Bind(SmiToFloat64(a));
787     var_rhs_float64.Bind(SmiToFloat64(b));
788     Node* value = Float64Mul(var_lhs_float64.value(), var_rhs_float64.value());
789     var_result = AllocateHeapNumberWithValue(value);
790     Goto(&return_result);
791   }
792 
793   BIND(&return_result);
794   return var_result.value();
795 }
796 
TrySmiDiv(TNode<Smi> dividend,TNode<Smi> divisor,Label * bailout)797 TNode<Smi> CodeStubAssembler::TrySmiDiv(TNode<Smi> dividend, TNode<Smi> divisor,
798                                         Label* bailout) {
799   // Both {a} and {b} are Smis. Bailout to floating point division if {divisor}
800   // is zero.
801   GotoIf(WordEqual(divisor, SmiConstant(0)), bailout);
802 
803   // Do floating point division if {dividend} is zero and {divisor} is
804   // negative.
805   Label dividend_is_zero(this), dividend_is_not_zero(this);
806   Branch(WordEqual(dividend, SmiConstant(0)), &dividend_is_zero,
807          &dividend_is_not_zero);
808 
809   BIND(&dividend_is_zero);
810   {
811     GotoIf(SmiLessThan(divisor, SmiConstant(0)), bailout);
812     Goto(&dividend_is_not_zero);
813   }
814   BIND(&dividend_is_not_zero);
815 
816   TNode<Int32T> untagged_divisor = SmiToInt32(divisor);
817   TNode<Int32T> untagged_dividend = SmiToInt32(dividend);
818 
819   // Do floating point division if {dividend} is kMinInt (or kMinInt - 1
820   // if the Smi size is 31) and {divisor} is -1.
821   Label divisor_is_minus_one(this), divisor_is_not_minus_one(this);
822   Branch(Word32Equal(untagged_divisor, Int32Constant(-1)),
823          &divisor_is_minus_one, &divisor_is_not_minus_one);
824 
825   BIND(&divisor_is_minus_one);
826   {
827     GotoIf(Word32Equal(
828                untagged_dividend,
829                Int32Constant(kSmiValueSize == 32 ? kMinInt : (kMinInt >> 1))),
830            bailout);
831     Goto(&divisor_is_not_minus_one);
832   }
833   BIND(&divisor_is_not_minus_one);
834 
835   TNode<Int32T> untagged_result = Int32Div(untagged_dividend, untagged_divisor);
836   TNode<Int32T> truncated = Signed(Int32Mul(untagged_result, untagged_divisor));
837 
838   // Do floating point division if the remainder is not 0.
839   GotoIf(Word32NotEqual(untagged_dividend, truncated), bailout);
840 
841   return SmiFromInt32(untagged_result);
842 }
843 
TruncateIntPtrToInt32(SloppyTNode<IntPtrT> value)844 TNode<Int32T> CodeStubAssembler::TruncateIntPtrToInt32(
845     SloppyTNode<IntPtrT> value) {
846   if (Is64()) {
847     return TruncateInt64ToInt32(ReinterpretCast<Int64T>(value));
848   }
849   return ReinterpretCast<Int32T>(value);
850 }
851 
TaggedIsSmi(SloppyTNode<Object> a)852 TNode<BoolT> CodeStubAssembler::TaggedIsSmi(SloppyTNode<Object> a) {
853   return WordEqual(WordAnd(BitcastTaggedToWord(a), IntPtrConstant(kSmiTagMask)),
854                    IntPtrConstant(0));
855 }
856 
TaggedIsSmi(TNode<MaybeObject> a)857 TNode<BoolT> CodeStubAssembler::TaggedIsSmi(TNode<MaybeObject> a) {
858   return WordEqual(
859       WordAnd(BitcastMaybeObjectToWord(a), IntPtrConstant(kSmiTagMask)),
860       IntPtrConstant(0));
861 }
862 
TaggedIsNotSmi(SloppyTNode<Object> a)863 TNode<BoolT> CodeStubAssembler::TaggedIsNotSmi(SloppyTNode<Object> a) {
864   return WordNotEqual(
865       WordAnd(BitcastTaggedToWord(a), IntPtrConstant(kSmiTagMask)),
866       IntPtrConstant(0));
867 }
868 
TaggedIsPositiveSmi(SloppyTNode<Object> a)869 TNode<BoolT> CodeStubAssembler::TaggedIsPositiveSmi(SloppyTNode<Object> a) {
870   return WordEqual(WordAnd(BitcastTaggedToWord(a),
871                            IntPtrConstant(kSmiTagMask | kSmiSignMask)),
872                    IntPtrConstant(0));
873 }
874 
WordIsWordAligned(SloppyTNode<WordT> word)875 TNode<BoolT> CodeStubAssembler::WordIsWordAligned(SloppyTNode<WordT> word) {
876   return WordEqual(IntPtrConstant(0),
877                    WordAnd(word, IntPtrConstant(kPointerSize - 1)));
878 }
879 
880 #if DEBUG
Bind(Label * label,AssemblerDebugInfo debug_info)881 void CodeStubAssembler::Bind(Label* label, AssemblerDebugInfo debug_info) {
882   CodeAssembler::Bind(label, debug_info);
883 }
884 #else
Bind(Label * label)885 void CodeStubAssembler::Bind(Label* label) { CodeAssembler::Bind(label); }
886 #endif  // DEBUG
887 
LoadDoubleWithHoleCheck(TNode<FixedDoubleArray> array,TNode<Smi> index,Label * if_hole)888 TNode<Float64T> CodeStubAssembler::LoadDoubleWithHoleCheck(
889     TNode<FixedDoubleArray> array, TNode<Smi> index, Label* if_hole) {
890   return TNode<Float64T>::UncheckedCast(LoadFixedDoubleArrayElement(
891       array, index, MachineType::Float64(), 0, SMI_PARAMETERS, if_hole));
892 }
893 
BranchIfPrototypesHaveNoElements(Node * receiver_map,Label * definitely_no_elements,Label * possibly_elements)894 void CodeStubAssembler::BranchIfPrototypesHaveNoElements(
895     Node* receiver_map, Label* definitely_no_elements,
896     Label* possibly_elements) {
897   CSA_SLOW_ASSERT(this, IsMap(receiver_map));
898   VARIABLE(var_map, MachineRepresentation::kTagged, receiver_map);
899   Label loop_body(this, &var_map);
900   Node* empty_fixed_array = LoadRoot(Heap::kEmptyFixedArrayRootIndex);
901   Node* empty_slow_element_dictionary =
902       LoadRoot(Heap::kEmptySlowElementDictionaryRootIndex);
903   Goto(&loop_body);
904 
905   BIND(&loop_body);
906   {
907     Node* map = var_map.value();
908     Node* prototype = LoadMapPrototype(map);
909     GotoIf(IsNull(prototype), definitely_no_elements);
910     Node* prototype_map = LoadMap(prototype);
911     TNode<Int32T> prototype_instance_type = LoadMapInstanceType(prototype_map);
912 
913     // Pessimistically assume elements if a Proxy, Special API Object,
914     // or JSValue wrapper is found on the prototype chain. After this
915     // instance type check, it's not necessary to check for interceptors or
916     // access checks.
917     Label if_custom(this, Label::kDeferred), if_notcustom(this);
918     Branch(IsCustomElementsReceiverInstanceType(prototype_instance_type),
919            &if_custom, &if_notcustom);
920 
921     BIND(&if_custom);
922     {
923       // For string JSValue wrappers we still support the checks as long
924       // as they wrap the empty string.
925       GotoIfNot(InstanceTypeEqual(prototype_instance_type, JS_VALUE_TYPE),
926                 possibly_elements);
927       Node* prototype_value = LoadJSValueValue(prototype);
928       Branch(IsEmptyString(prototype_value), &if_notcustom, possibly_elements);
929     }
930 
931     BIND(&if_notcustom);
932     {
933       Node* prototype_elements = LoadElements(prototype);
934       var_map.Bind(prototype_map);
935       GotoIf(WordEqual(prototype_elements, empty_fixed_array), &loop_body);
936       Branch(WordEqual(prototype_elements, empty_slow_element_dictionary),
937              &loop_body, possibly_elements);
938     }
939   }
940 }
941 
BranchIfJSReceiver(Node * object,Label * if_true,Label * if_false)942 void CodeStubAssembler::BranchIfJSReceiver(Node* object, Label* if_true,
943                                            Label* if_false) {
944   GotoIf(TaggedIsSmi(object), if_false);
945   STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
946   Branch(IsJSReceiver(object), if_true, if_false);
947 }
948 
IsFastJSArray(SloppyTNode<Object> object,SloppyTNode<Context> context)949 TNode<BoolT> CodeStubAssembler::IsFastJSArray(SloppyTNode<Object> object,
950                                               SloppyTNode<Context> context) {
951   Label if_true(this), if_false(this, Label::kDeferred), exit(this);
952   BranchIfFastJSArray(object, context, &if_true, &if_false);
953   TVARIABLE(BoolT, var_result);
954   BIND(&if_true);
955   {
956     var_result = Int32TrueConstant();
957     Goto(&exit);
958   }
959   BIND(&if_false);
960   {
961     var_result = Int32FalseConstant();
962     Goto(&exit);
963   }
964   BIND(&exit);
965   return var_result.value();
966 }
967 
IsFastJSArrayWithNoCustomIteration(TNode<Object> object,TNode<Context> context,TNode<Context> native_context)968 TNode<BoolT> CodeStubAssembler::IsFastJSArrayWithNoCustomIteration(
969     TNode<Object> object, TNode<Context> context,
970     TNode<Context> native_context) {
971   Label if_false(this, Label::kDeferred), if_fast(this), exit(this);
972   TVARIABLE(BoolT, var_result);
973   GotoIfForceSlowPath(&if_false);
974   BranchIfFastJSArray(object, context, &if_fast, &if_false);
975   BIND(&if_fast);
976   {
977     // Check that the Array.prototype hasn't been modified in a way that would
978     // affect iteration.
979     Node* protector_cell = LoadRoot(Heap::kArrayIteratorProtectorRootIndex);
980     DCHECK(isolate()->heap()->array_iterator_protector()->IsPropertyCell());
981     var_result =
982         WordEqual(LoadObjectField(protector_cell, PropertyCell::kValueOffset),
983                   SmiConstant(Isolate::kProtectorValid));
984     Goto(&exit);
985   }
986   BIND(&if_false);
987   {
988     var_result = Int32FalseConstant();
989     Goto(&exit);
990   }
991   BIND(&exit);
992   return var_result.value();
993 }
994 
BranchIfFastJSArray(Node * object,Node * context,Label * if_true,Label * if_false)995 void CodeStubAssembler::BranchIfFastJSArray(Node* object, Node* context,
996                                             Label* if_true, Label* if_false) {
997   GotoIfForceSlowPath(if_false);
998 
999   // Bailout if receiver is a Smi.
1000   GotoIf(TaggedIsSmi(object), if_false);
1001 
1002   Node* map = LoadMap(object);
1003   GotoIfNot(IsJSArrayMap(map), if_false);
1004 
1005   // Bailout if receiver has slow elements.
1006   Node* elements_kind = LoadMapElementsKind(map);
1007   GotoIfNot(IsFastElementsKind(elements_kind), if_false);
1008 
1009   // Verify that our prototype is the initial array prototype.
1010   GotoIfNot(IsPrototypeInitialArrayPrototype(context, map), if_false);
1011 
1012   Branch(IsNoElementsProtectorCellInvalid(), if_false, if_true);
1013 }
1014 
BranchIfFastJSArrayForCopy(Node * object,Node * context,Label * if_true,Label * if_false)1015 void CodeStubAssembler::BranchIfFastJSArrayForCopy(Node* object, Node* context,
1016                                                    Label* if_true,
1017                                                    Label* if_false) {
1018   GotoIf(IsArraySpeciesProtectorCellInvalid(), if_false);
1019   BranchIfFastJSArray(object, context, if_true, if_false);
1020 }
1021 
GotoIfForceSlowPath(Label * if_true)1022 void CodeStubAssembler::GotoIfForceSlowPath(Label* if_true) {
1023 #ifdef V8_ENABLE_FORCE_SLOW_PATH
1024   Node* const force_slow_path_addr =
1025       ExternalConstant(ExternalReference::force_slow_path(isolate()));
1026   Node* const force_slow = Load(MachineType::Uint8(), force_slow_path_addr);
1027 
1028   GotoIf(force_slow, if_true);
1029 #endif
1030 }
1031 
AllocateRaw(Node * size_in_bytes,AllocationFlags flags,Node * top_address,Node * limit_address)1032 Node* CodeStubAssembler::AllocateRaw(Node* size_in_bytes, AllocationFlags flags,
1033                                      Node* top_address, Node* limit_address) {
1034   // TODO(jgruber, chromium:848672): TNodeify AllocateRaw.
1035   // TODO(jgruber, chromium:848672): Call FatalProcessOutOfMemory if this fails.
1036   {
1037     intptr_t constant_value;
1038     if (ToIntPtrConstant(size_in_bytes, constant_value)) {
1039       CHECK(Internals::IsValidSmi(constant_value));
1040       CHECK_GT(constant_value, 0);
1041     } else {
1042       CSA_CHECK(this,
1043                 IsValidPositiveSmi(UncheckedCast<IntPtrT>(size_in_bytes)));
1044     }
1045   }
1046 
1047   Node* top = Load(MachineType::Pointer(), top_address);
1048   Node* limit = Load(MachineType::Pointer(), limit_address);
1049 
1050   // If there's not enough space, call the runtime.
1051   VARIABLE(result, MachineRepresentation::kTagged);
1052   Label runtime_call(this, Label::kDeferred), no_runtime_call(this);
1053   Label merge_runtime(this, &result);
1054 
1055   bool needs_double_alignment = flags & kDoubleAlignment;
1056 
1057   if (flags & kAllowLargeObjectAllocation) {
1058     Label next(this);
1059     GotoIf(IsRegularHeapObjectSize(size_in_bytes), &next);
1060 
1061     Node* runtime_flags = SmiConstant(
1062         Smi::FromInt(AllocateDoubleAlignFlag::encode(needs_double_alignment) |
1063                      AllocateTargetSpace::encode(AllocationSpace::LO_SPACE)));
1064     Node* const runtime_result =
1065         CallRuntime(Runtime::kAllocateInTargetSpace, NoContextConstant(),
1066                     SmiTag(size_in_bytes), runtime_flags);
1067     result.Bind(runtime_result);
1068     Goto(&merge_runtime);
1069 
1070     BIND(&next);
1071   }
1072 
1073   VARIABLE(adjusted_size, MachineType::PointerRepresentation(), size_in_bytes);
1074 
1075   if (needs_double_alignment) {
1076     Label not_aligned(this), done_alignment(this, &adjusted_size);
1077 
1078     Branch(WordAnd(top, IntPtrConstant(kDoubleAlignmentMask)), &not_aligned,
1079            &done_alignment);
1080 
1081     BIND(&not_aligned);
1082     Node* not_aligned_size = IntPtrAdd(size_in_bytes, IntPtrConstant(4));
1083     adjusted_size.Bind(not_aligned_size);
1084     Goto(&done_alignment);
1085 
1086     BIND(&done_alignment);
1087   }
1088 
1089   Node* new_top = IntPtrAdd(top, adjusted_size.value());
1090 
1091   Branch(UintPtrGreaterThanOrEqual(new_top, limit), &runtime_call,
1092          &no_runtime_call);
1093 
1094   BIND(&runtime_call);
1095   Node* runtime_result;
1096   if (flags & kPretenured) {
1097     Node* runtime_flags = SmiConstant(
1098         Smi::FromInt(AllocateDoubleAlignFlag::encode(needs_double_alignment) |
1099                      AllocateTargetSpace::encode(AllocationSpace::OLD_SPACE)));
1100     runtime_result =
1101         CallRuntime(Runtime::kAllocateInTargetSpace, NoContextConstant(),
1102                     SmiTag(size_in_bytes), runtime_flags);
1103   } else {
1104     runtime_result = CallRuntime(Runtime::kAllocateInNewSpace,
1105                                  NoContextConstant(), SmiTag(size_in_bytes));
1106   }
1107   result.Bind(runtime_result);
1108   Goto(&merge_runtime);
1109 
1110   // When there is enough space, return `top' and bump it up.
1111   BIND(&no_runtime_call);
1112   Node* no_runtime_result = top;
1113   StoreNoWriteBarrier(MachineType::PointerRepresentation(), top_address,
1114                       new_top);
1115 
1116   VARIABLE(address, MachineType::PointerRepresentation(), no_runtime_result);
1117 
1118   if (needs_double_alignment) {
1119     Label needs_filler(this), done_filling(this, &address);
1120     Branch(IntPtrEqual(adjusted_size.value(), size_in_bytes), &done_filling,
1121            &needs_filler);
1122 
1123     BIND(&needs_filler);
1124     // Store a filler and increase the address by kPointerSize.
1125     StoreNoWriteBarrier(MachineRepresentation::kTagged, top,
1126                         LoadRoot(Heap::kOnePointerFillerMapRootIndex));
1127     address.Bind(IntPtrAdd(no_runtime_result, IntPtrConstant(4)));
1128 
1129     Goto(&done_filling);
1130 
1131     BIND(&done_filling);
1132   }
1133 
1134   no_runtime_result = BitcastWordToTagged(
1135       IntPtrAdd(address.value(), IntPtrConstant(kHeapObjectTag)));
1136 
1137   result.Bind(no_runtime_result);
1138   Goto(&merge_runtime);
1139 
1140   BIND(&merge_runtime);
1141   return result.value();
1142 }
1143 
AllocateRawUnaligned(Node * size_in_bytes,AllocationFlags flags,Node * top_address,Node * limit_address)1144 Node* CodeStubAssembler::AllocateRawUnaligned(Node* size_in_bytes,
1145                                               AllocationFlags flags,
1146                                               Node* top_address,
1147                                               Node* limit_address) {
1148   DCHECK_EQ(flags & kDoubleAlignment, 0);
1149   return AllocateRaw(size_in_bytes, flags, top_address, limit_address);
1150 }
1151 
AllocateRawDoubleAligned(Node * size_in_bytes,AllocationFlags flags,Node * top_address,Node * limit_address)1152 Node* CodeStubAssembler::AllocateRawDoubleAligned(Node* size_in_bytes,
1153                                                   AllocationFlags flags,
1154                                                   Node* top_address,
1155                                                   Node* limit_address) {
1156 #if defined(V8_HOST_ARCH_32_BIT)
1157   return AllocateRaw(size_in_bytes, flags | kDoubleAlignment, top_address,
1158                      limit_address);
1159 #elif defined(V8_HOST_ARCH_64_BIT)
1160   // Allocation on 64 bit machine is naturally double aligned
1161   return AllocateRaw(size_in_bytes, flags & ~kDoubleAlignment, top_address,
1162                      limit_address);
1163 #else
1164 #error Architecture not supported
1165 #endif
1166 }
1167 
AllocateInNewSpace(Node * size_in_bytes,AllocationFlags flags)1168 Node* CodeStubAssembler::AllocateInNewSpace(Node* size_in_bytes,
1169                                             AllocationFlags flags) {
1170   DCHECK(flags == kNone || flags == kDoubleAlignment);
1171   CSA_ASSERT(this, IsRegularHeapObjectSize(size_in_bytes));
1172   return Allocate(size_in_bytes, flags);
1173 }
1174 
Allocate(Node * size_in_bytes,AllocationFlags flags)1175 Node* CodeStubAssembler::Allocate(Node* size_in_bytes, AllocationFlags flags) {
1176   Comment("Allocate");
1177   bool const new_space = !(flags & kPretenured);
1178   Node* top_address = ExternalConstant(
1179       new_space
1180           ? ExternalReference::new_space_allocation_top_address(isolate())
1181           : ExternalReference::old_space_allocation_top_address(isolate()));
1182   DCHECK_EQ(kPointerSize,
1183             ExternalReference::new_space_allocation_limit_address(isolate())
1184                     .address() -
1185                 ExternalReference::new_space_allocation_top_address(isolate())
1186                     .address());
1187   DCHECK_EQ(kPointerSize,
1188             ExternalReference::old_space_allocation_limit_address(isolate())
1189                     .address() -
1190                 ExternalReference::old_space_allocation_top_address(isolate())
1191                     .address());
1192   Node* limit_address = IntPtrAdd(top_address, IntPtrConstant(kPointerSize));
1193 
1194   if (flags & kDoubleAlignment) {
1195     return AllocateRawDoubleAligned(size_in_bytes, flags, top_address,
1196                                     limit_address);
1197   } else {
1198     return AllocateRawUnaligned(size_in_bytes, flags, top_address,
1199                                 limit_address);
1200   }
1201 }
1202 
AllocateInNewSpace(int size_in_bytes,AllocationFlags flags)1203 Node* CodeStubAssembler::AllocateInNewSpace(int size_in_bytes,
1204                                             AllocationFlags flags) {
1205   CHECK(flags == kNone || flags == kDoubleAlignment);
1206   DCHECK_LE(size_in_bytes, kMaxRegularHeapObjectSize);
1207   return CodeStubAssembler::Allocate(IntPtrConstant(size_in_bytes), flags);
1208 }
1209 
Allocate(int size_in_bytes,AllocationFlags flags)1210 Node* CodeStubAssembler::Allocate(int size_in_bytes, AllocationFlags flags) {
1211   return CodeStubAssembler::Allocate(IntPtrConstant(size_in_bytes), flags);
1212 }
1213 
InnerAllocate(Node * previous,Node * offset)1214 Node* CodeStubAssembler::InnerAllocate(Node* previous, Node* offset) {
1215   return BitcastWordToTagged(IntPtrAdd(BitcastTaggedToWord(previous), offset));
1216 }
1217 
InnerAllocate(Node * previous,int offset)1218 Node* CodeStubAssembler::InnerAllocate(Node* previous, int offset) {
1219   return InnerAllocate(previous, IntPtrConstant(offset));
1220 }
1221 
IsRegularHeapObjectSize(Node * size)1222 Node* CodeStubAssembler::IsRegularHeapObjectSize(Node* size) {
1223   return UintPtrLessThanOrEqual(size,
1224                                 IntPtrConstant(kMaxRegularHeapObjectSize));
1225 }
1226 
BranchIfToBooleanIsTrue(Node * value,Label * if_true,Label * if_false)1227 void CodeStubAssembler::BranchIfToBooleanIsTrue(Node* value, Label* if_true,
1228                                                 Label* if_false) {
1229   Label if_smi(this), if_notsmi(this), if_heapnumber(this, Label::kDeferred),
1230       if_bigint(this, Label::kDeferred);
1231   // Rule out false {value}.
1232   GotoIf(WordEqual(value, FalseConstant()), if_false);
1233 
1234   // Check if {value} is a Smi or a HeapObject.
1235   Branch(TaggedIsSmi(value), &if_smi, &if_notsmi);
1236 
1237   BIND(&if_smi);
1238   {
1239     // The {value} is a Smi, only need to check against zero.
1240     BranchIfSmiEqual(CAST(value), SmiConstant(0), if_false, if_true);
1241   }
1242 
1243   BIND(&if_notsmi);
1244   {
1245     // Check if {value} is the empty string.
1246     GotoIf(IsEmptyString(value), if_false);
1247 
1248     // The {value} is a HeapObject, load its map.
1249     Node* value_map = LoadMap(value);
1250 
1251     // Only null, undefined and document.all have the undetectable bit set,
1252     // so we can return false immediately when that bit is set.
1253     GotoIf(IsUndetectableMap(value_map), if_false);
1254 
1255     // We still need to handle numbers specially, but all other {value}s
1256     // that make it here yield true.
1257     GotoIf(IsHeapNumberMap(value_map), &if_heapnumber);
1258     Branch(IsBigInt(value), &if_bigint, if_true);
1259 
1260     BIND(&if_heapnumber);
1261     {
1262       // Load the floating point value of {value}.
1263       Node* value_value = LoadObjectField(value, HeapNumber::kValueOffset,
1264                                           MachineType::Float64());
1265 
1266       // Check if the floating point {value} is neither 0.0, -0.0 nor NaN.
1267       Branch(Float64LessThan(Float64Constant(0.0), Float64Abs(value_value)),
1268              if_true, if_false);
1269     }
1270 
1271     BIND(&if_bigint);
1272     {
1273       Node* result =
1274           CallRuntime(Runtime::kBigIntToBoolean, NoContextConstant(), value);
1275       CSA_ASSERT(this, IsBoolean(result));
1276       Branch(WordEqual(result, TrueConstant()), if_true, if_false);
1277     }
1278   }
1279 }
1280 
LoadFromFrame(int offset,MachineType rep)1281 Node* CodeStubAssembler::LoadFromFrame(int offset, MachineType rep) {
1282   Node* frame_pointer = LoadFramePointer();
1283   return Load(rep, frame_pointer, IntPtrConstant(offset));
1284 }
1285 
LoadFromParentFrame(int offset,MachineType rep)1286 Node* CodeStubAssembler::LoadFromParentFrame(int offset, MachineType rep) {
1287   Node* frame_pointer = LoadParentFramePointer();
1288   return Load(rep, frame_pointer, IntPtrConstant(offset));
1289 }
1290 
LoadBufferObject(Node * buffer,int offset,MachineType rep)1291 Node* CodeStubAssembler::LoadBufferObject(Node* buffer, int offset,
1292                                           MachineType rep) {
1293   return Load(rep, buffer, IntPtrConstant(offset));
1294 }
1295 
LoadObjectField(SloppyTNode<HeapObject> object,int offset,MachineType rep)1296 Node* CodeStubAssembler::LoadObjectField(SloppyTNode<HeapObject> object,
1297                                          int offset, MachineType rep) {
1298   CSA_ASSERT(this, IsStrongHeapObject(object));
1299   return Load(rep, object, IntPtrConstant(offset - kHeapObjectTag));
1300 }
1301 
LoadObjectField(SloppyTNode<HeapObject> object,SloppyTNode<IntPtrT> offset,MachineType rep)1302 Node* CodeStubAssembler::LoadObjectField(SloppyTNode<HeapObject> object,
1303                                          SloppyTNode<IntPtrT> offset,
1304                                          MachineType rep) {
1305   CSA_ASSERT(this, IsStrongHeapObject(object));
1306   return Load(rep, object, IntPtrSub(offset, IntPtrConstant(kHeapObjectTag)));
1307 }
1308 
TNode<IntPtrT> CodeStubAssembler::LoadAndUntagObjectField(
1310     SloppyTNode<HeapObject> object, int offset) {
1311   if (Is64()) {
1312 #if V8_TARGET_LITTLE_ENDIAN
1313     offset += kPointerSize / 2;
1314 #endif
1315     return ChangeInt32ToIntPtr(
1316         LoadObjectField(object, offset, MachineType::Int32()));
1317   } else {
1318     return SmiToIntPtr(
1319         LoadObjectField(object, offset, MachineType::AnyTagged()));
1320   }
1321 }
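// Note on the untagging shortcut above: with 32-bit Smi payloads on 64-bit
// targets (the configuration this path assumes), a Smi keeps its value in the
// upper half of the word, the lower half being the tag. On little-endian
// machines the payload can therefore be read as a plain Int32 at
// offset + kPointerSize / 2. Worked example (assuming kPointerSize == 8):
// Smi 5 is the word 0x0000'0005'0000'0000, so a 32-bit load at offset + 4
// yields 5 directly, with no shift required.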
1322 
TNode<Int32T> CodeStubAssembler::LoadAndUntagToWord32ObjectField(Node* object,
1324                                                                  int offset) {
1325   if (Is64()) {
1326 #if V8_TARGET_LITTLE_ENDIAN
1327     offset += kPointerSize / 2;
1328 #endif
1329     return UncheckedCast<Int32T>(
1330         LoadObjectField(object, offset, MachineType::Int32()));
1331   } else {
1332     return SmiToInt32(
1333         LoadObjectField(object, offset, MachineType::AnyTagged()));
1334   }
1335 }
1336 
TNode<IntPtrT> CodeStubAssembler::LoadAndUntagSmi(Node* base, int index) {
1338   if (Is64()) {
1339 #if V8_TARGET_LITTLE_ENDIAN
1340     index += kPointerSize / 2;
1341 #endif
1342     return ChangeInt32ToIntPtr(
1343         Load(MachineType::Int32(), base, IntPtrConstant(index)));
1344   } else {
1345     return SmiToIntPtr(
1346         Load(MachineType::AnyTagged(), base, IntPtrConstant(index)));
1347   }
1348 }
1349 
TNode<Int32T> CodeStubAssembler::LoadAndUntagToWord32Root(
1351     Heap::RootListIndex root_index) {
1352   Node* roots_array_start =
1353       ExternalConstant(ExternalReference::roots_array_start(isolate()));
1354   int index = root_index * kPointerSize;
1355   if (Is64()) {
1356 #if V8_TARGET_LITTLE_ENDIAN
1357     index += kPointerSize / 2;
1358 #endif
1359     return UncheckedCast<Int32T>(
1360         Load(MachineType::Int32(), roots_array_start, IntPtrConstant(index)));
1361   } else {
1362     return SmiToInt32(Load(MachineType::AnyTagged(), roots_array_start,
1363                            IntPtrConstant(index)));
1364   }
1365 }
1366 
Node* CodeStubAssembler::StoreAndTagSmi(Node* base, int offset, Node* value) {
1368   if (Is64()) {
1369     int zero_offset = offset + kPointerSize / 2;
1370     int payload_offset = offset;
1371 #if V8_TARGET_LITTLE_ENDIAN
1372     std::swap(zero_offset, payload_offset);
1373 #endif
1374     StoreNoWriteBarrier(MachineRepresentation::kWord32, base,
1375                         IntPtrConstant(zero_offset), Int32Constant(0));
1376     return StoreNoWriteBarrier(MachineRepresentation::kWord32, base,
1377                                IntPtrConstant(payload_offset),
1378                                TruncateInt64ToInt32(value));
1379   } else {
1380     return StoreNoWriteBarrier(MachineRepresentation::kTaggedSigned, base,
1381                                IntPtrConstant(offset), SmiTag(value));
1382   }
1383 }
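// The two 32-bit stores above are together equivalent to storing the tagged
// 64-bit Smi (value << 32) in one word, but without a write barrier: the half
// holding the tag bits is zeroed and the other half receives the truncated
// payload. Illustrative example (assuming a little-endian 64-bit target):
// storing value 7 at offset 0 writes 0 to [base + 0] and 7 to [base + 4],
// leaving the word 0x0000'0007'0000'0000 == SmiTag(7) in memory.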
1384 
TNode<Float64T> CodeStubAssembler::LoadHeapNumberValue(
1386     SloppyTNode<HeapNumber> object) {
1387   return TNode<Float64T>::UncheckedCast(LoadObjectField(
1388       object, HeapNumber::kValueOffset, MachineType::Float64()));
1389 }
1390 
TNode<Map> CodeStubAssembler::LoadMap(SloppyTNode<HeapObject> object) {
1392   return UncheckedCast<Map>(LoadObjectField(object, HeapObject::kMapOffset));
1393 }
1394 
TNode<Int32T> CodeStubAssembler::LoadInstanceType(
1396     SloppyTNode<HeapObject> object) {
1397   return LoadMapInstanceType(LoadMap(object));
1398 }
1399 
TNode<BoolT> CodeStubAssembler::HasInstanceType(SloppyTNode<HeapObject> object,
1401                                                 InstanceType instance_type) {
1402   return InstanceTypeEqual(LoadInstanceType(object), instance_type);
1403 }
1404 
TNode<BoolT> CodeStubAssembler::DoesntHaveInstanceType(
1406     SloppyTNode<HeapObject> object, InstanceType instance_type) {
1407   return Word32NotEqual(LoadInstanceType(object), Int32Constant(instance_type));
1408 }
1409 
TNode<BoolT> CodeStubAssembler::TaggedDoesntHaveInstanceType(
1411     SloppyTNode<HeapObject> any_tagged, InstanceType type) {
1412   /* return Phi <TaggedIsSmi(val), DoesntHaveInstanceType(val, type)> */
1413   TNode<BoolT> tagged_is_smi = TaggedIsSmi(any_tagged);
1414   return Select<BoolT>(
1415       tagged_is_smi, [=]() { return tagged_is_smi; },
1416       [=]() { return DoesntHaveInstanceType(any_tagged, type); });
1417 }
1418 
TNode<HeapObject> CodeStubAssembler::LoadFastProperties(
1420     SloppyTNode<JSObject> object) {
1421   CSA_SLOW_ASSERT(this, Word32Not(IsDictionaryMap(LoadMap(object))));
1422   TNode<Object> properties =
1423       LoadObjectField(object, JSObject::kPropertiesOrHashOffset);
1424   return Select<HeapObject>(TaggedIsSmi(properties),
1425                             [=] { return EmptyFixedArrayConstant(); },
1426                             [=] { return CAST(properties); });
1427 }
1428 
TNode<HeapObject> CodeStubAssembler::LoadSlowProperties(
1430     SloppyTNode<JSObject> object) {
1431   CSA_SLOW_ASSERT(this, IsDictionaryMap(LoadMap(object)));
1432   TNode<Object> properties =
1433       LoadObjectField(object, JSObject::kPropertiesOrHashOffset);
1434   return Select<HeapObject>(TaggedIsSmi(properties),
1435                             [=] { return EmptyPropertyDictionaryConstant(); },
1436                             [=] { return CAST(properties); });
1437 }
1438 
TNode<FixedArrayBase> CodeStubAssembler::LoadElements(
1440     SloppyTNode<JSObject> object) {
1441   return CAST(LoadObjectField(object, JSObject::kElementsOffset));
1442 }
1443 
TNode<Number> CodeStubAssembler::LoadJSArrayLength(SloppyTNode<JSArray> array) {
1445   CSA_ASSERT(this, IsJSArray(array));
1446   return CAST(LoadObjectField(array, JSArray::kLengthOffset));
1447 }
1448 
TNode<Smi> CodeStubAssembler::LoadFastJSArrayLength(
1450     SloppyTNode<JSArray> array) {
1451   TNode<Object> length = LoadJSArrayLength(array);
1452   CSA_ASSERT(this, IsFastElementsKind(LoadMapElementsKind(LoadMap(array))));
1453   // JSArray length is always a positive Smi for fast arrays.
1454   CSA_SLOW_ASSERT(this, TaggedIsPositiveSmi(length));
1455   return UncheckedCast<Smi>(length);
1456 }
1457 
TNode<Smi> CodeStubAssembler::LoadFixedArrayBaseLength(
1459     SloppyTNode<FixedArrayBase> array) {
1460   CSA_SLOW_ASSERT(this, IsNotWeakFixedArraySubclass(array));
1461   return CAST(LoadObjectField(array, FixedArrayBase::kLengthOffset));
1462 }
1463 
TNode<IntPtrT> CodeStubAssembler::LoadAndUntagFixedArrayBaseLength(
1465     SloppyTNode<FixedArrayBase> array) {
1466   return LoadAndUntagObjectField(array, FixedArrayBase::kLengthOffset);
1467 }
1468 
TNode<IntPtrT> CodeStubAssembler::LoadFeedbackVectorLength(
1470     TNode<FeedbackVector> vector) {
1471   return ChangeInt32ToIntPtr(
1472       LoadObjectField<Int32T>(vector, FeedbackVector::kLengthOffset));
1473 }
1474 
TNode<Smi> CodeStubAssembler::LoadWeakFixedArrayLength(
1476     TNode<WeakFixedArray> array) {
1477   return CAST(LoadObjectField(array, WeakFixedArray::kLengthOffset));
1478 }
1479 
TNode<IntPtrT> CodeStubAssembler::LoadAndUntagWeakFixedArrayLength(
1481     SloppyTNode<WeakFixedArray> array) {
1482   return LoadAndUntagObjectField(array, WeakFixedArray::kLengthOffset);
1483 }
1484 
TNode<Int32T> CodeStubAssembler::LoadMapBitField(SloppyTNode<Map> map) {
1486   CSA_SLOW_ASSERT(this, IsMap(map));
1487   return UncheckedCast<Int32T>(
1488       LoadObjectField(map, Map::kBitFieldOffset, MachineType::Uint8()));
1489 }
1490 
TNode<Int32T> CodeStubAssembler::LoadMapBitField2(SloppyTNode<Map> map) {
1492   CSA_SLOW_ASSERT(this, IsMap(map));
1493   return UncheckedCast<Int32T>(
1494       LoadObjectField(map, Map::kBitField2Offset, MachineType::Uint8()));
1495 }
1496 
TNode<Uint32T> CodeStubAssembler::LoadMapBitField3(SloppyTNode<Map> map) {
1498   CSA_SLOW_ASSERT(this, IsMap(map));
1499   return UncheckedCast<Uint32T>(
1500       LoadObjectField(map, Map::kBitField3Offset, MachineType::Uint32()));
1501 }
1502 
TNode<Int32T> CodeStubAssembler::LoadMapInstanceType(SloppyTNode<Map> map) {
1504   return UncheckedCast<Int32T>(
1505       LoadObjectField(map, Map::kInstanceTypeOffset, MachineType::Uint16()));
1506 }
1507 
TNode<Int32T> CodeStubAssembler::LoadMapElementsKind(SloppyTNode<Map> map) {
1509   CSA_SLOW_ASSERT(this, IsMap(map));
1510   Node* bit_field2 = LoadMapBitField2(map);
1511   return Signed(DecodeWord32<Map::ElementsKindBits>(bit_field2));
1512 }
1513 
TNode<DescriptorArray> CodeStubAssembler::LoadMapDescriptors(
1515     SloppyTNode<Map> map) {
1516   CSA_SLOW_ASSERT(this, IsMap(map));
1517   return CAST(LoadObjectField(map, Map::kDescriptorsOffset));
1518 }
1519 
TNode<HeapObject> CodeStubAssembler::LoadMapPrototype(SloppyTNode<Map> map) {
1521   CSA_SLOW_ASSERT(this, IsMap(map));
1522   return CAST(LoadObjectField(map, Map::kPrototypeOffset));
1523 }
1524 
TNode<PrototypeInfo> CodeStubAssembler::LoadMapPrototypeInfo(
1526     SloppyTNode<Map> map, Label* if_no_proto_info) {
1527   Label if_strong_heap_object(this);
1528   CSA_ASSERT(this, IsMap(map));
1529   TNode<MaybeObject> maybe_prototype_info =
1530       LoadMaybeWeakObjectField(map, Map::kTransitionsOrPrototypeInfoOffset);
1531   TVARIABLE(Object, prototype_info);
1532   DispatchMaybeObject(maybe_prototype_info, if_no_proto_info, if_no_proto_info,
1533                       if_no_proto_info, &if_strong_heap_object,
1534                       &prototype_info);
1535 
1536   BIND(&if_strong_heap_object);
1537   GotoIfNot(WordEqual(LoadMap(CAST(prototype_info.value())),
1538                       LoadRoot(Heap::kPrototypeInfoMapRootIndex)),
1539             if_no_proto_info);
1540   return CAST(prototype_info.value());
1541 }
1542 
TNode<IntPtrT> CodeStubAssembler::LoadMapInstanceSizeInWords(
1544     SloppyTNode<Map> map) {
1545   CSA_SLOW_ASSERT(this, IsMap(map));
1546   return ChangeInt32ToIntPtr(LoadObjectField(
1547       map, Map::kInstanceSizeInWordsOffset, MachineType::Uint8()));
1548 }
1549 
TNode<IntPtrT> CodeStubAssembler::LoadMapInobjectPropertiesStartInWords(
1551     SloppyTNode<Map> map) {
1552   CSA_SLOW_ASSERT(this, IsMap(map));
1553   // See Map::GetInObjectPropertiesStartInWords() for details.
1554   CSA_ASSERT(this, IsJSObjectMap(map));
1555   return ChangeInt32ToIntPtr(LoadObjectField(
1556       map, Map::kInObjectPropertiesStartOrConstructorFunctionIndexOffset,
1557       MachineType::Uint8()));
1558 }
1559 
TNode<IntPtrT> CodeStubAssembler::LoadMapConstructorFunctionIndex(
1561     SloppyTNode<Map> map) {
1562   CSA_SLOW_ASSERT(this, IsMap(map));
1563   // See Map::GetConstructorFunctionIndex() for details.
1564   CSA_ASSERT(this, IsPrimitiveInstanceType(LoadMapInstanceType(map)));
1565   return ChangeInt32ToIntPtr(LoadObjectField(
1566       map, Map::kInObjectPropertiesStartOrConstructorFunctionIndexOffset,
1567       MachineType::Uint8()));
1568 }
1569 
TNode<Object> CodeStubAssembler::LoadMapConstructor(SloppyTNode<Map> map) {
1571   CSA_SLOW_ASSERT(this, IsMap(map));
1572   TVARIABLE(Object, result,
1573             LoadObjectField(map, Map::kConstructorOrBackPointerOffset));
1574 
1575   Label done(this), loop(this, &result);
1576   Goto(&loop);
1577   BIND(&loop);
1578   {
1579     GotoIf(TaggedIsSmi(result.value()), &done);
1580     Node* is_map_type =
1581         InstanceTypeEqual(LoadInstanceType(CAST(result.value())), MAP_TYPE);
1582     GotoIfNot(is_map_type, &done);
1583     result = LoadObjectField(CAST(result.value()),
1584                              Map::kConstructorOrBackPointerOffset);
1585     Goto(&loop);
1586   }
1587   BIND(&done);
1588   return result.value();
1589 }
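// Background for the loop above: Map::kConstructorOrBackPointerOffset is
// overloaded. Maps created by transitions store their back pointer (another
// Map) in this slot, and only the root map of the transition chain stores the
// actual constructor. The loop therefore keeps following Map values until it
// reaches a non-map value, which is returned as the constructor.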
1590 
Node* CodeStubAssembler::LoadMapEnumLength(SloppyTNode<Map> map) {
1592   CSA_SLOW_ASSERT(this, IsMap(map));
1593   Node* bit_field3 = LoadMapBitField3(map);
1594   return DecodeWordFromWord32<Map::EnumLengthBits>(bit_field3);
1595 }
1596 
TNode<Object> CodeStubAssembler::LoadMapBackPointer(SloppyTNode<Map> map) {
1598   TNode<HeapObject> object =
1599       CAST(LoadObjectField(map, Map::kConstructorOrBackPointerOffset));
1600   return Select<Object>(IsMap(object), [=] { return object; },
1601                         [=] { return UndefinedConstant(); });
1602 }
1603 
TNode<IntPtrT> CodeStubAssembler::LoadJSReceiverIdentityHash(
1605     SloppyTNode<Object> receiver, Label* if_no_hash) {
1606   TVARIABLE(IntPtrT, var_hash);
1607   Label done(this), if_smi(this), if_property_array(this),
1608       if_property_dictionary(this), if_fixed_array(this);
1609 
1610   TNode<Object> properties_or_hash =
1611       LoadObjectField(TNode<HeapObject>::UncheckedCast(receiver),
1612                       JSReceiver::kPropertiesOrHashOffset);
1613   GotoIf(TaggedIsSmi(properties_or_hash), &if_smi);
1614 
1615   TNode<HeapObject> properties =
1616       TNode<HeapObject>::UncheckedCast(properties_or_hash);
1617   TNode<Int32T> properties_instance_type = LoadInstanceType(properties);
1618 
1619   GotoIf(InstanceTypeEqual(properties_instance_type, PROPERTY_ARRAY_TYPE),
1620          &if_property_array);
1621   Branch(InstanceTypeEqual(properties_instance_type, HASH_TABLE_TYPE),
1622          &if_property_dictionary, &if_fixed_array);
1623 
1624   BIND(&if_fixed_array);
1625   {
1626     var_hash = IntPtrConstant(PropertyArray::kNoHashSentinel);
1627     Goto(&done);
1628   }
1629 
1630   BIND(&if_smi);
1631   {
1632     var_hash = SmiUntag(TNode<Smi>::UncheckedCast(properties_or_hash));
1633     Goto(&done);
1634   }
1635 
1636   BIND(&if_property_array);
1637   {
1638     TNode<IntPtrT> length_and_hash = LoadAndUntagObjectField(
1639         properties, PropertyArray::kLengthAndHashOffset);
1640     var_hash = TNode<IntPtrT>::UncheckedCast(
1641         DecodeWord<PropertyArray::HashField>(length_and_hash));
1642     Goto(&done);
1643   }
1644 
1645   BIND(&if_property_dictionary);
1646   {
1647     var_hash = SmiUntag(CAST(
1648         LoadFixedArrayElement(properties, NameDictionary::kObjectHashIndex)));
1649     Goto(&done);
1650   }
1651 
1652   BIND(&done);
1653   if (if_no_hash != nullptr) {
1654     GotoIf(IntPtrEqual(var_hash.value(),
1655                        IntPtrConstant(PropertyArray::kNoHashSentinel)),
1656            if_no_hash);
1657   }
1658   return var_hash.value();
1659 }
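// Summary of the cases handled above: JSReceiver::kPropertiesOrHashOffset is
// overloaded and can hold
//   - a Smi: the identity hash itself (no property backing store yet),
//   - a PropertyArray: the hash is packed into its length-and-hash bit field,
//   - a NameDictionary (hash table): the hash sits at kObjectHashIndex,
//   - the empty fixed array: no hash has been computed (kNoHashSentinel).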
1660 
TNode<Uint32T> CodeStubAssembler::LoadNameHashField(SloppyTNode<Name> name) {
1662   CSA_ASSERT(this, IsName(name));
1663   return LoadObjectField<Uint32T>(name, Name::kHashFieldOffset);
1664 }
1665 
TNode<Uint32T> CodeStubAssembler::LoadNameHash(SloppyTNode<Name> name,
1667                                                Label* if_hash_not_computed) {
1668   TNode<Uint32T> hash_field = LoadNameHashField(name);
1669   if (if_hash_not_computed != nullptr) {
1670     GotoIf(IsSetWord32(hash_field, Name::kHashNotComputedMask),
1671            if_hash_not_computed);
1672   }
1673   return Unsigned(Word32Shr(hash_field, Int32Constant(Name::kHashShift)));
1674 }
1675 
TNode<IntPtrT> CodeStubAssembler::LoadStringLengthAsWord(
1677     SloppyTNode<String> object) {
1678   return SmiUntag(LoadStringLengthAsSmi(object));
1679 }
1680 
TNode<Smi> CodeStubAssembler::LoadStringLengthAsSmi(
1682     SloppyTNode<String> object) {
1683   CSA_ASSERT(this, IsString(object));
1684   return CAST(LoadObjectField(object, String::kLengthOffset,
1685                               MachineType::TaggedPointer()));
1686 }
1687 
Node* CodeStubAssembler::PointerToSeqStringData(Node* seq_string) {
1689   CSA_ASSERT(this, IsString(seq_string));
1690   CSA_ASSERT(this,
1691              IsSequentialStringInstanceType(LoadInstanceType(seq_string)));
1692   STATIC_ASSERT(SeqOneByteString::kHeaderSize == SeqTwoByteString::kHeaderSize);
1693   return IntPtrAdd(
1694       BitcastTaggedToWord(seq_string),
1695       IntPtrConstant(SeqOneByteString::kHeaderSize - kHeapObjectTag));
1696 }
1697 
Node* CodeStubAssembler::LoadJSValueValue(Node* object) {
1699   CSA_ASSERT(this, IsJSValue(object));
1700   return LoadObjectField(object, JSValue::kValueOffset);
1701 }
1702 
TNode<Object> CodeStubAssembler::LoadWeakCellValueUnchecked(
1704     SloppyTNode<HeapObject> weak_cell) {
1705   CSA_ASSERT(this, IsStrongHeapObject(weak_cell));
1706   // TODO(ishell): fix callers.
1707   return LoadObjectField(weak_cell, WeakCell::kValueOffset);
1708 }
1709 
TNode<Object> CodeStubAssembler::LoadWeakCellValue(
1711     SloppyTNode<WeakCell> weak_cell, Label* if_cleared) {
1712   CSA_ASSERT(this, IsWeakCell(weak_cell));
1713   TNode<Object> value = LoadWeakCellValueUnchecked(weak_cell);
1714   if (if_cleared != nullptr) {
1715     GotoIf(WordEqual(value, IntPtrConstant(0)), if_cleared);
1716   }
1717   return value;
1718 }
1719 
void CodeStubAssembler::DispatchMaybeObject(TNode<MaybeObject> maybe_object,
1721                                             Label* if_smi, Label* if_cleared,
1722                                             Label* if_weak, Label* if_strong,
1723                                             TVariable<Object>* extracted) {
1724   Label inner_if_smi(this), inner_if_strong(this);
1725 
1726   GotoIf(TaggedIsSmi(maybe_object), &inner_if_smi);
1727 
1728   GotoIf(WordEqual(BitcastMaybeObjectToWord(maybe_object),
1729                    IntPtrConstant(reinterpret_cast<intptr_t>(
1730                        HeapObjectReference::ClearedValue()))),
1731          if_cleared);
1732 
1733   GotoIf(WordEqual(WordAnd(BitcastMaybeObjectToWord(maybe_object),
1734                            IntPtrConstant(kHeapObjectTagMask)),
1735                    IntPtrConstant(kHeapObjectTag)),
1736          &inner_if_strong);
1737 
1738   *extracted =
1739       BitcastWordToTagged(WordAnd(BitcastMaybeObjectToWord(maybe_object),
1740                                   IntPtrConstant(~kWeakHeapObjectMask)));
1741   Goto(if_weak);
1742 
1743   BIND(&inner_if_smi);
1744   *extracted = ToObject(maybe_object);
1745   Goto(if_smi);
1746 
1747   BIND(&inner_if_strong);
1748   *extracted = ToObject(maybe_object);
1749   Goto(if_strong);
1750 }
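// Tag scheme relied upon above and in the helpers below: the low two bits of
// a MaybeObject word distinguish the cases. An even word is a Smi, 01
// (kHeapObjectTag) marks a strong reference, and 11 (kWeakHeapObjectTag)
// marks a weak reference; a weak reference whose full word equals
// HeapObjectReference::ClearedValue() has been cleared by the GC. Masking off
// kWeakHeapObjectMask, as done before Goto(if_weak), turns a weak reference
// back into an ordinary strong pointer.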
1751 
TNode<BoolT> CodeStubAssembler::IsStrongHeapObject(TNode<MaybeObject> value) {
1753   return WordEqual(WordAnd(BitcastMaybeObjectToWord(value),
1754                            IntPtrConstant(kHeapObjectTagMask)),
1755                    IntPtrConstant(kHeapObjectTag));
1756 }
1757 
TNode<HeapObject> CodeStubAssembler::ToStrongHeapObject(
1759     TNode<MaybeObject> value) {
1760   CSA_ASSERT(this, IsStrongHeapObject(value));
1761   return ReinterpretCast<HeapObject>(value);
1762 }
1763 
TNode<BoolT> CodeStubAssembler::IsWeakOrClearedHeapObject(
1765     TNode<MaybeObject> value) {
1766   return WordEqual(WordAnd(BitcastMaybeObjectToWord(value),
1767                            IntPtrConstant(kHeapObjectTagMask)),
1768                    IntPtrConstant(kWeakHeapObjectTag));
1769 }
1770 
TNode<BoolT> CodeStubAssembler::IsClearedWeakHeapObject(
1772     TNode<MaybeObject> value) {
1773   return WordEqual(BitcastMaybeObjectToWord(value),
1774                    IntPtrConstant(kClearedWeakHeapObject));
1775 }
1776 
TNode<BoolT> CodeStubAssembler::IsNotClearedWeakHeapObject(
1778     TNode<MaybeObject> value) {
1779   return WordNotEqual(BitcastMaybeObjectToWord(value),
1780                       IntPtrConstant(kClearedWeakHeapObject));
1781 }
1782 
TNode<HeapObject> CodeStubAssembler::ToWeakHeapObject(
1784     TNode<MaybeObject> value) {
1785   CSA_ASSERT(this, IsWeakOrClearedHeapObject(value));
1786   CSA_ASSERT(this, IsNotClearedWeakHeapObject(value));
1787   return UncheckedCast<HeapObject>(BitcastWordToTagged(WordAnd(
1788       BitcastMaybeObjectToWord(value), IntPtrConstant(~kWeakHeapObjectMask))));
1789 }
1790 
TNode<HeapObject> CodeStubAssembler::ToWeakHeapObject(TNode<MaybeObject> value,
1792                                                       Label* if_cleared) {
1793   GotoIf(IsClearedWeakHeapObject(value), if_cleared);
1794   return ToWeakHeapObject(value);
1795 }
1796 
TNode<BoolT> CodeStubAssembler::IsObject(TNode<MaybeObject> value) {
1798   return WordNotEqual(WordAnd(BitcastMaybeObjectToWord(value),
1799                               IntPtrConstant(kHeapObjectTagMask)),
1800                       IntPtrConstant(kWeakHeapObjectTag));
1801 }
1802 
TNode<Object> CodeStubAssembler::ToObject(TNode<MaybeObject> value) {
1804   // TODO(marja): Make CAST work (with the appropriate check); replace this with
1805   // CAST.
1806   CSA_ASSERT(this, IsObject(value));
1807   return ReinterpretCast<Object>(value);
1808 }
1809 
TNode<MaybeObject> CodeStubAssembler::MakeWeak(TNode<HeapObject> value) {
1811   return ReinterpretCast<MaybeObject>(BitcastWordToTagged(
1812       WordOr(BitcastTaggedToWord(value), IntPtrConstant(kWeakHeapObjectTag))));
1813 }
1814 
TNode<MaybeObject> CodeStubAssembler::LoadArrayElement(
1816     SloppyTNode<HeapObject> array, int array_header_size, Node* index_node,
1817     int additional_offset, ParameterMode parameter_mode,
1818     LoadSensitivity needs_poisoning) {
1819   CSA_SLOW_ASSERT(this, IntPtrGreaterThanOrEqual(
1820                             ParameterToIntPtr(index_node, parameter_mode),
1821                             IntPtrConstant(0)));
1822   DCHECK_EQ(additional_offset % kPointerSize, 0);
1823   int32_t header_size = array_header_size + additional_offset - kHeapObjectTag;
1824   TNode<IntPtrT> offset = ElementOffsetFromIndex(index_node, HOLEY_ELEMENTS,
1825                                                  parameter_mode, header_size);
1826   STATIC_ASSERT(FixedArrayBase::kLengthOffset == WeakFixedArray::kLengthOffset);
1827   STATIC_ASSERT(FixedArrayBase::kLengthOffset ==
1828                 PropertyArray::kLengthAndHashOffset);
1829   // Check that index_node + additional_offset <= object.length.
1830   // TODO(cbruni): Use proper LoadXXLength helpers
1831   CSA_SLOW_ASSERT(
1832       this,
1833       IsOffsetInBounds(
1834           offset,
1835           Select<IntPtrT>(
1836               IsPropertyArray(array),
1837               [=] {
1838                 TNode<IntPtrT> length_and_hash = LoadAndUntagObjectField(
1839                     array, PropertyArray::kLengthAndHashOffset);
1840                 return TNode<IntPtrT>::UncheckedCast(
1841                     DecodeWord<PropertyArray::LengthField>(length_and_hash));
1842               },
1843               [=] {
1844                 return LoadAndUntagObjectField(array,
1845                                                FixedArrayBase::kLengthOffset);
1846               }),
1847           FixedArray::kHeaderSize));
1848   return UncheckedCast<MaybeObject>(
1849       Load(MachineType::AnyTagged(), array, offset, needs_poisoning));
1850 }
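// Worked example of the offset computation above (illustrative; assumes a
// 64-bit target where kPointerSize == 8, FixedArray::kHeaderSize == 16 and
// kHeapObjectTag == 1): for index 3 in INTPTR_PARAMETERS mode with
// additional_offset == 0,
//   offset = 3 * kPointerSize + (16 + 0 - 1) = 24 + 15 = 39,
// i.e. the untagged address of the fourth element slot relative to the array.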
1851 
TNode<Object> CodeStubAssembler::LoadFixedArrayElement(
1853     SloppyTNode<HeapObject> object, Node* index_node, int additional_offset,
1854     ParameterMode parameter_mode, LoadSensitivity needs_poisoning) {
  // This function is currently also used for non-FixedArrays (e.g.,
  // PropertyArrays), so the otherwise reasonable assert
  // IsFixedArraySubclass(object) would not hold here. TODO(marja): Fix.
1858   CSA_ASSERT(this, IsNotWeakFixedArraySubclass(object));
1859   TNode<MaybeObject> element =
1860       LoadArrayElement(object, FixedArray::kHeaderSize, index_node,
1861                        additional_offset, parameter_mode, needs_poisoning);
1862   CSA_ASSERT(this, IsObject(element));
1863   return ToObject(element);
1864 }
1865 
TNode<Object> CodeStubAssembler::LoadPropertyArrayElement(
1867     SloppyTNode<PropertyArray> object, SloppyTNode<IntPtrT> index) {
1868   int additional_offset = 0;
1869   ParameterMode parameter_mode = INTPTR_PARAMETERS;
1870   LoadSensitivity needs_poisoning = LoadSensitivity::kSafe;
1871   STATIC_ASSERT(PropertyArray::kHeaderSize == FixedArray::kHeaderSize);
1872 
1873   return ToObject(LoadArrayElement(object, PropertyArray::kHeaderSize, index,
1874                                    additional_offset, parameter_mode,
1875                                    needs_poisoning));
1876 }
1877 
TNode<RawPtrT> CodeStubAssembler::LoadFixedTypedArrayBackingStore(
1879     TNode<FixedTypedArrayBase> typed_array) {
1880   // Backing store = external_pointer + base_pointer.
1881   Node* external_pointer =
1882       LoadObjectField(typed_array, FixedTypedArrayBase::kExternalPointerOffset,
1883                       MachineType::Pointer());
1884   Node* base_pointer =
1885       LoadObjectField(typed_array, FixedTypedArrayBase::kBasePointerOffset);
1886   return UncheckedCast<RawPtrT>(
1887       IntPtrAdd(external_pointer, BitcastTaggedToWord(base_pointer)));
1888 }
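// Layout note for the addition above: for off-heap typed arrays base_pointer
// is the Smi zero and external_pointer is the absolute address of the buffer,
// whereas for on-heap typed arrays base_pointer is the FixedTypedArrayBase
// itself and external_pointer holds the small offset to its data section.
// Adding the two therefore yields the start of the backing store in either
// case.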
1889 
Node* CodeStubAssembler::LoadFixedBigInt64ArrayElementAsTagged(
1891     Node* data_pointer, Node* offset) {
1892   TVARIABLE(BigInt, var_result);
1893   Label done(this), if_zero(this);
1894   if (Is64()) {
1895     TNode<IntPtrT> value = UncheckedCast<IntPtrT>(
1896         Load(MachineType::IntPtr(), data_pointer, offset));
1897     Label if_positive(this), if_negative(this);
1898     GotoIf(IntPtrEqual(value, IntPtrConstant(0)), &if_zero);
1899     var_result = AllocateRawBigInt(IntPtrConstant(1));
1900     Branch(IntPtrGreaterThan(value, IntPtrConstant(0)), &if_positive,
1901            &if_negative);
1902 
1903     BIND(&if_positive);
1904     {
1905       StoreBigIntBitfield(var_result.value(),
1906                           IntPtrConstant(BigInt::SignBits::encode(false) |
1907                                          BigInt::LengthBits::encode(1)));
1908       StoreBigIntDigit(var_result.value(), 0, Unsigned(value));
1909       Goto(&done);
1910     }
1911 
1912     BIND(&if_negative);
1913     {
1914       StoreBigIntBitfield(var_result.value(),
1915                           IntPtrConstant(BigInt::SignBits::encode(true) |
1916                                          BigInt::LengthBits::encode(1)));
1917       StoreBigIntDigit(var_result.value(), 0,
1918                        Unsigned(IntPtrSub(IntPtrConstant(0), value)));
1919       Goto(&done);
1920     }
1921   } else {
1922     DCHECK(!Is64());
1923     TVARIABLE(WordT, var_sign, IntPtrConstant(BigInt::SignBits::encode(false)));
1924     TVARIABLE(IntPtrT, var_low);
1925     TVARIABLE(IntPtrT, var_high);
1926 #if defined(V8_TARGET_BIG_ENDIAN)
1927     var_high = UncheckedCast<IntPtrT>(
1928         Load(MachineType::UintPtr(), data_pointer, offset));
1929     var_low = UncheckedCast<IntPtrT>(
1930         Load(MachineType::UintPtr(), data_pointer,
1931              Int32Add(offset, Int32Constant(kPointerSize))));
1932 #else
1933     var_low = UncheckedCast<IntPtrT>(
1934         Load(MachineType::UintPtr(), data_pointer, offset));
1935     var_high = UncheckedCast<IntPtrT>(
1936         Load(MachineType::UintPtr(), data_pointer,
1937              Int32Add(offset, Int32Constant(kPointerSize))));
1938 #endif
1939 
1940     Label high_zero(this), negative(this), allocate_one_digit(this),
1941         allocate_two_digits(this);
1942 
1943     GotoIf(WordEqual(var_high.value(), IntPtrConstant(0)), &high_zero);
1944     Branch(IntPtrLessThan(var_high.value(), IntPtrConstant(0)), &negative,
1945            &allocate_two_digits);
1946 
1947     BIND(&high_zero);
1948     Branch(WordEqual(var_low.value(), IntPtrConstant(0)), &if_zero,
1949            &allocate_one_digit);
1950 
1951     BIND(&negative);
1952     {
1953       var_sign = IntPtrConstant(BigInt::SignBits::encode(true));
1954       // We must negate the value by computing "0 - (high|low)", performing
1955       // both parts of the subtraction separately and manually taking care
1956       // of the carry bit (which is 1 iff low != 0).
1957       var_high = IntPtrSub(IntPtrConstant(0), var_high.value());
1958       Label carry(this), no_carry(this);
1959       Branch(WordEqual(var_low.value(), IntPtrConstant(0)), &no_carry, &carry);
1960       BIND(&carry);
1961       var_high = IntPtrSub(var_high.value(), IntPtrConstant(1));
1962       Goto(&no_carry);
1963       BIND(&no_carry);
1964       var_low = IntPtrSub(IntPtrConstant(0), var_low.value());
1965       // var_high was non-zero going into this block, but subtracting the
1966       // carry bit from it could bring us back onto the "one digit" path.
1967       Branch(WordEqual(var_high.value(), IntPtrConstant(0)),
1968              &allocate_one_digit, &allocate_two_digits);
1969     }
1970 
1971     BIND(&allocate_one_digit);
1972     {
1973       var_result = AllocateRawBigInt(IntPtrConstant(1));
1974       StoreBigIntBitfield(
1975           var_result.value(),
1976           WordOr(var_sign.value(),
1977                  IntPtrConstant(BigInt::LengthBits::encode(1))));
1978       StoreBigIntDigit(var_result.value(), 0, Unsigned(var_low.value()));
1979       Goto(&done);
1980     }
1981 
1982     BIND(&allocate_two_digits);
1983     {
1984       var_result = AllocateRawBigInt(IntPtrConstant(2));
1985       StoreBigIntBitfield(
1986           var_result.value(),
1987           WordOr(var_sign.value(),
1988                  IntPtrConstant(BigInt::LengthBits::encode(2))));
1989       StoreBigIntDigit(var_result.value(), 0, Unsigned(var_low.value()));
1990       StoreBigIntDigit(var_result.value(), 1, Unsigned(var_high.value()));
1991       Goto(&done);
1992     }
1993   }
1994   BIND(&if_zero);
1995   var_result = AllocateBigInt(IntPtrConstant(0));
1996   Goto(&done);
1997 
1998   BIND(&done);
1999   return var_result.value();
2000 }
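// Worked example for the 32-bit path above: loading the 64-bit value -1
// yields high == 0xFFFFFFFF and low == 0xFFFFFFFF. The negation sets
// high := 0 - high == 1, then, because low != 0, applies the carry so that
// high becomes 0, and finally low := 0 - low == 1. Since high is now zero, a
// single-digit BigInt with digit 1 and the sign bit set is allocated, i.e.
// the BigInt -1.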
2001 
Node* CodeStubAssembler::LoadFixedBigUint64ArrayElementAsTagged(
2003     Node* data_pointer, Node* offset) {
2004   TVARIABLE(BigInt, var_result);
2005   Label if_zero(this), done(this);
2006   if (Is64()) {
2007     TNode<UintPtrT> value = UncheckedCast<UintPtrT>(
2008         Load(MachineType::UintPtr(), data_pointer, offset));
2009     GotoIf(IntPtrEqual(value, IntPtrConstant(0)), &if_zero);
2010     var_result = AllocateBigInt(IntPtrConstant(1));
2011     StoreBigIntDigit(var_result.value(), 0, value);
2012     Goto(&done);
2013   } else {
2014     DCHECK(!Is64());
2015     Label high_zero(this);
2016 
2017 #if defined(V8_TARGET_BIG_ENDIAN)
2018     TNode<UintPtrT> high = UncheckedCast<UintPtrT>(
2019         Load(MachineType::UintPtr(), data_pointer, offset));
2020     TNode<UintPtrT> low = UncheckedCast<UintPtrT>(
2021         Load(MachineType::UintPtr(), data_pointer,
2022              Int32Add(offset, Int32Constant(kPointerSize))));
2023 #else
2024     TNode<UintPtrT> low = UncheckedCast<UintPtrT>(
2025         Load(MachineType::UintPtr(), data_pointer, offset));
2026     TNode<UintPtrT> high = UncheckedCast<UintPtrT>(
2027         Load(MachineType::UintPtr(), data_pointer,
2028              Int32Add(offset, Int32Constant(kPointerSize))));
2029 #endif
2030 
2031     GotoIf(WordEqual(high, IntPtrConstant(0)), &high_zero);
2032     var_result = AllocateBigInt(IntPtrConstant(2));
2033     StoreBigIntDigit(var_result.value(), 0, low);
2034     StoreBigIntDigit(var_result.value(), 1, high);
2035     Goto(&done);
2036 
2037     BIND(&high_zero);
2038     GotoIf(WordEqual(low, IntPtrConstant(0)), &if_zero);
2039     var_result = AllocateBigInt(IntPtrConstant(1));
2040     StoreBigIntDigit(var_result.value(), 0, low);
2041     Goto(&done);
2042   }
2043   BIND(&if_zero);
2044   var_result = AllocateBigInt(IntPtrConstant(0));
2045   Goto(&done);
2046 
2047   BIND(&done);
2048   return var_result.value();
2049 }
2050 
Node* CodeStubAssembler::LoadFixedTypedArrayElementAsTagged(
2052     Node* data_pointer, Node* index_node, ElementsKind elements_kind,
2053     ParameterMode parameter_mode) {
2054   Node* offset =
2055       ElementOffsetFromIndex(index_node, elements_kind, parameter_mode, 0);
2056   switch (elements_kind) {
2057     case UINT8_ELEMENTS: /* fall through */
2058     case UINT8_CLAMPED_ELEMENTS:
2059       return SmiFromInt32(Load(MachineType::Uint8(), data_pointer, offset));
2060     case INT8_ELEMENTS:
2061       return SmiFromInt32(Load(MachineType::Int8(), data_pointer, offset));
2062     case UINT16_ELEMENTS:
2063       return SmiFromInt32(Load(MachineType::Uint16(), data_pointer, offset));
2064     case INT16_ELEMENTS:
2065       return SmiFromInt32(Load(MachineType::Int16(), data_pointer, offset));
2066     case UINT32_ELEMENTS:
2067       return ChangeUint32ToTagged(
2068           Load(MachineType::Uint32(), data_pointer, offset));
2069     case INT32_ELEMENTS:
2070       return ChangeInt32ToTagged(
2071           Load(MachineType::Int32(), data_pointer, offset));
2072     case FLOAT32_ELEMENTS:
2073       return AllocateHeapNumberWithValue(ChangeFloat32ToFloat64(
2074           Load(MachineType::Float32(), data_pointer, offset)));
2075     case FLOAT64_ELEMENTS:
2076       return AllocateHeapNumberWithValue(
2077           Load(MachineType::Float64(), data_pointer, offset));
2078     case BIGINT64_ELEMENTS:
2079       return LoadFixedBigInt64ArrayElementAsTagged(data_pointer, offset);
2080     case BIGUINT64_ELEMENTS:
2081       return LoadFixedBigUint64ArrayElementAsTagged(data_pointer, offset);
2082     default:
2083       UNREACHABLE();
2084   }
2085 }
2086 
void CodeStubAssembler::StoreFixedTypedArrayElementFromTagged(
2088     TNode<Context> context, TNode<FixedTypedArrayBase> elements,
2089     TNode<Object> index_node, TNode<Object> value, ElementsKind elements_kind,
2090     ParameterMode parameter_mode) {
2091   TNode<RawPtrT> data_pointer = LoadFixedTypedArrayBackingStore(elements);
2092   switch (elements_kind) {
2093     case UINT8_ELEMENTS:
2094     case UINT8_CLAMPED_ELEMENTS:
2095     case INT8_ELEMENTS:
2096     case UINT16_ELEMENTS:
2097     case INT16_ELEMENTS:
2098       StoreElement(data_pointer, elements_kind, index_node,
2099                    SmiToInt32(CAST(value)), parameter_mode);
2100       break;
2101     case UINT32_ELEMENTS:
2102     case INT32_ELEMENTS:
2103       StoreElement(data_pointer, elements_kind, index_node,
2104                    TruncateTaggedToWord32(context, value), parameter_mode);
2105       break;
2106     case FLOAT32_ELEMENTS:
2107       StoreElement(data_pointer, elements_kind, index_node,
2108                    TruncateFloat64ToFloat32(LoadHeapNumberValue(CAST(value))),
2109                    parameter_mode);
2110       break;
2111     case FLOAT64_ELEMENTS:
2112       StoreElement(data_pointer, elements_kind, index_node,
2113                    LoadHeapNumberValue(CAST(value)), parameter_mode);
2114       break;
2115     case BIGUINT64_ELEMENTS:
2116     case BIGINT64_ELEMENTS: {
2117       TNode<IntPtrT> offset =
2118           ElementOffsetFromIndex(index_node, elements_kind, parameter_mode, 0);
2119       EmitBigTypedArrayElementStore(elements, data_pointer, offset,
2120                                     CAST(value));
2121       break;
2122     }
2123     default:
2124       UNREACHABLE();
2125   }
2126 }
2127 
TNode<MaybeObject> CodeStubAssembler::LoadFeedbackVectorSlot(
2129     Node* object, Node* slot_index_node, int additional_offset,
2130     ParameterMode parameter_mode) {
2131   CSA_SLOW_ASSERT(this, IsFeedbackVector(object));
2132   CSA_SLOW_ASSERT(this, MatchesParameterMode(slot_index_node, parameter_mode));
2133   int32_t header_size =
2134       FeedbackVector::kFeedbackSlotsOffset + additional_offset - kHeapObjectTag;
2135   Node* offset = ElementOffsetFromIndex(slot_index_node, HOLEY_ELEMENTS,
2136                                         parameter_mode, header_size);
2137   CSA_SLOW_ASSERT(
2138       this, IsOffsetInBounds(offset, LoadFeedbackVectorLength(CAST(object)),
2139                              FeedbackVector::kHeaderSize));
2140   return UncheckedCast<MaybeObject>(
2141       Load(MachineType::AnyTagged(), object, offset));
2142 }
2143 
TNode<Int32T> CodeStubAssembler::LoadAndUntagToWord32ArrayElement(
2145     SloppyTNode<HeapObject> object, int array_header_size, Node* index_node,
2146     int additional_offset, ParameterMode parameter_mode) {
2147   CSA_SLOW_ASSERT(this, MatchesParameterMode(index_node, parameter_mode));
2148   DCHECK_EQ(additional_offset % kPointerSize, 0);
2149   int endian_correction = 0;
2150 #if V8_TARGET_LITTLE_ENDIAN
2151   if (Is64()) endian_correction = kPointerSize / 2;
2152 #endif
2153   int32_t header_size = array_header_size + additional_offset - kHeapObjectTag +
2154                         endian_correction;
2155   Node* offset = ElementOffsetFromIndex(index_node, HOLEY_ELEMENTS,
2156                                         parameter_mode, header_size);
2157   STATIC_ASSERT(FixedArrayBase::kLengthOffset == WeakFixedArray::kLengthOffset);
2158   // Check that index_node + additional_offset <= object.length.
2159   // TODO(cbruni): Use proper LoadXXLength helpers
2160   CSA_SLOW_ASSERT(
2161       this, IsOffsetInBounds(
2162                 offset,
2163                 LoadAndUntagObjectField(object, FixedArrayBase::kLengthOffset),
2164                 FixedArray::kHeaderSize + endian_correction));
2165   if (Is64()) {
2166     return UncheckedCast<Int32T>(Load(MachineType::Int32(), object, offset));
2167   } else {
2168     return SmiToInt32(Load(MachineType::AnyTagged(), object, offset));
2169   }
2170 }
2171 
TNode<Int32T> CodeStubAssembler::LoadAndUntagToWord32FixedArrayElement(
2173     SloppyTNode<HeapObject> object, Node* index_node, int additional_offset,
2174     ParameterMode parameter_mode) {
2175   CSA_SLOW_ASSERT(this, IsFixedArraySubclass(object));
2176   return LoadAndUntagToWord32ArrayElement(object, FixedArray::kHeaderSize,
2177                                           index_node, additional_offset,
2178                                           parameter_mode);
2179 }
2180 
TNode<MaybeObject> CodeStubAssembler::LoadWeakFixedArrayElement(
2182     TNode<WeakFixedArray> object, Node* index, int additional_offset,
2183     ParameterMode parameter_mode, LoadSensitivity needs_poisoning) {
2184   return LoadArrayElement(object, WeakFixedArray::kHeaderSize, index,
2185                           additional_offset, parameter_mode, needs_poisoning);
2186 }
2187 
Node* CodeStubAssembler::LoadFixedDoubleArrayElement(
2189     Node* object, Node* index_node, MachineType machine_type,
2190     int additional_offset, ParameterMode parameter_mode, Label* if_hole) {
2191   CSA_ASSERT(this, IsFixedDoubleArray(object));
2192   DCHECK_EQ(additional_offset % kPointerSize, 0);
2193   CSA_SLOW_ASSERT(this, MatchesParameterMode(index_node, parameter_mode));
2194   int32_t header_size =
2195       FixedDoubleArray::kHeaderSize + additional_offset - kHeapObjectTag;
2196   Node* offset = ElementOffsetFromIndex(index_node, HOLEY_DOUBLE_ELEMENTS,
2197                                         parameter_mode, header_size);
2198   CSA_SLOW_ASSERT(
2199       this,
2200       IsOffsetInBounds(offset, LoadAndUntagFixedArrayBaseLength(object),
2201                        FixedDoubleArray::kHeaderSize, HOLEY_DOUBLE_ELEMENTS));
2202   return LoadDoubleWithHoleCheck(object, offset, if_hole, machine_type);
2203 }
2204 
Node* CodeStubAssembler::LoadDoubleWithHoleCheck(Node* base, Node* offset,
2206                                                  Label* if_hole,
2207                                                  MachineType machine_type) {
2208   if (if_hole) {
2209     // TODO(ishell): Compare only the upper part for the hole once the
2210     // compiler is able to fold addition of already complex |offset| with
2211     // |kIeeeDoubleExponentWordOffset| into one addressing mode.
2212     if (Is64()) {
2213       Node* element = Load(MachineType::Uint64(), base, offset);
2214       GotoIf(Word64Equal(element, Int64Constant(kHoleNanInt64)), if_hole);
2215     } else {
2216       Node* element_upper = Load(
2217           MachineType::Uint32(), base,
2218           IntPtrAdd(offset, IntPtrConstant(kIeeeDoubleExponentWordOffset)));
2219       GotoIf(Word32Equal(element_upper, Int32Constant(kHoleNanUpper32)),
2220              if_hole);
2221     }
2222   }
2223   if (machine_type.IsNone()) {
2224     // This means the actual value is not needed.
2225     return nullptr;
2226   }
2227   return Load(machine_type, base, offset);
2228 }
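// The hole is a dedicated NaN bit pattern (kHoleNanInt64), so on 64-bit
// targets a single word compare suffices. On 32-bit targets only the upper
// (exponent) word is compared against kHoleNanUpper32; this relies on NaNs
// being canonicalized when written into double arrays, so no regular element
// can carry the hole's upper word.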
2229 
TNode<Object> CodeStubAssembler::LoadContextElement(
2231     SloppyTNode<Context> context, int slot_index) {
2232   int offset = Context::SlotOffset(slot_index);
2233   return UncheckedCast<Object>(
2234       Load(MachineType::AnyTagged(), context, IntPtrConstant(offset)));
2235 }
2236 
TNode<Object> CodeStubAssembler::LoadContextElement(
2238     SloppyTNode<Context> context, SloppyTNode<IntPtrT> slot_index) {
2239   Node* offset =
2240       IntPtrAdd(TimesPointerSize(slot_index),
2241                 IntPtrConstant(Context::kHeaderSize - kHeapObjectTag));
2242   return UncheckedCast<Object>(Load(MachineType::AnyTagged(), context, offset));
2243 }
2244 
void CodeStubAssembler::StoreContextElement(SloppyTNode<Context> context,
2246                                             int slot_index,
2247                                             SloppyTNode<Object> value) {
2248   int offset = Context::SlotOffset(slot_index);
2249   Store(context, IntPtrConstant(offset), value);
2250 }
2251 
void CodeStubAssembler::StoreContextElement(SloppyTNode<Context> context,
2253                                             SloppyTNode<IntPtrT> slot_index,
2254                                             SloppyTNode<Object> value) {
2255   Node* offset =
2256       IntPtrAdd(TimesPointerSize(slot_index),
2257                 IntPtrConstant(Context::kHeaderSize - kHeapObjectTag));
2258   Store(context, offset, value);
2259 }
2260 
void CodeStubAssembler::StoreContextElementNoWriteBarrier(
2262     SloppyTNode<Context> context, int slot_index, SloppyTNode<Object> value) {
2263   int offset = Context::SlotOffset(slot_index);
2264   StoreNoWriteBarrier(MachineRepresentation::kTagged, context,
2265                       IntPtrConstant(offset), value);
2266 }
2267 
TNode<Context> CodeStubAssembler::LoadNativeContext(
2269     SloppyTNode<Context> context) {
2270   return UncheckedCast<Context>(
2271       LoadContextElement(context, Context::NATIVE_CONTEXT_INDEX));
2272 }
2273 
TNode<Context> CodeStubAssembler::LoadModuleContext(
2275     SloppyTNode<Context> context) {
2276   Node* module_map = LoadRoot(Heap::kModuleContextMapRootIndex);
2277   Variable cur_context(this, MachineRepresentation::kTaggedPointer);
2278   cur_context.Bind(context);
2279 
2280   Label context_found(this);
2281 
2282   Variable* context_search_loop_variables[1] = {&cur_context};
2283   Label context_search(this, 1, context_search_loop_variables);
2284 
2285   // Loop until cur_context->map() is module_map.
2286   Goto(&context_search);
2287   BIND(&context_search);
2288   {
2289     CSA_ASSERT(this, Word32BinaryNot(IsNativeContext(cur_context.value())));
2290     GotoIf(WordEqual(LoadMap(cur_context.value()), module_map), &context_found);
2291 
2292     cur_context.Bind(
2293         LoadContextElement(cur_context.value(), Context::PREVIOUS_INDEX));
2294     Goto(&context_search);
2295   }
2296 
2297   BIND(&context_found);
2298   return UncheckedCast<Context>(cur_context.value());
2299 }
2300 
TNode<Map> CodeStubAssembler::LoadJSArrayElementsMap(
2302     SloppyTNode<Int32T> kind, SloppyTNode<Context> native_context) {
2303   CSA_ASSERT(this, IsFastElementsKind(kind));
2304   CSA_ASSERT(this, IsNativeContext(native_context));
2305   Node* offset = IntPtrAdd(IntPtrConstant(Context::FIRST_JS_ARRAY_MAP_SLOT),
2306                            ChangeInt32ToIntPtr(kind));
2307   return UncheckedCast<Map>(LoadContextElement(native_context, offset));
2308 }
2309 
TNode<Map> CodeStubAssembler::LoadJSArrayElementsMap(
2311     ElementsKind kind, SloppyTNode<Context> native_context) {
2312   CSA_ASSERT(this, IsNativeContext(native_context));
2313   return UncheckedCast<Map>(
2314       LoadContextElement(native_context, Context::ArrayMapIndex(kind)));
2315 }
2316 
Node* CodeStubAssembler::LoadJSFunctionPrototype(Node* function,
2318                                                  Label* if_bailout) {
2319   CSA_ASSERT(this, TaggedIsNotSmi(function));
2320   CSA_ASSERT(this, IsJSFunction(function));
2321   CSA_ASSERT(this, IsFunctionWithPrototypeSlotMap(LoadMap(function)));
2322   CSA_ASSERT(this, IsClearWord32<Map::HasNonInstancePrototypeBit>(
2323                        LoadMapBitField(LoadMap(function))));
2324   Node* proto_or_map =
2325       LoadObjectField(function, JSFunction::kPrototypeOrInitialMapOffset);
2326   GotoIf(IsTheHole(proto_or_map), if_bailout);
2327 
2328   VARIABLE(var_result, MachineRepresentation::kTagged, proto_or_map);
2329   Label done(this, &var_result);
2330   GotoIfNot(IsMap(proto_or_map), &done);
2331 
2332   var_result.Bind(LoadMapPrototype(proto_or_map));
2333   Goto(&done);
2334 
2335   BIND(&done);
2336   return var_result.value();
2337 }
2338 
Node* CodeStubAssembler::LoadSharedFunctionInfoBytecodeArray(Node* shared) {
2340   CSA_ASSERT(this, TaggedIsNotSmi(shared));
2341   CSA_ASSERT(this, IsSharedFunctionInfo(shared));
2342 
2343   Node* function_data =
2344       LoadObjectField(shared, SharedFunctionInfo::kFunctionDataOffset);
2345 
2346   VARIABLE(var_result, MachineRepresentation::kTagged, function_data);
2347   Label done(this, &var_result);
2348 
2349   GotoIfNot(HasInstanceType(function_data, INTERPRETER_DATA_TYPE), &done);
2350   Node* bytecode_array =
2351       LoadObjectField(function_data, InterpreterData::kBytecodeArrayOffset);
2352   var_result.Bind(bytecode_array);
2353   Goto(&done);
2354 
2355   BIND(&done);
2356   return var_result.value();
2357 }
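// The function-data slot usually holds the BytecodeArray directly. When it
// instead holds an InterpreterData (a wrapper that, among other things,
// carries the bytecode array), the bytecode is one extra load away, which is
// what the HasInstanceType check above unwraps.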
2358 
void CodeStubAssembler::StoreHeapNumberValue(SloppyTNode<HeapNumber> object,
2360                                              SloppyTNode<Float64T> value) {
2361   StoreObjectFieldNoWriteBarrier(object, HeapNumber::kValueOffset, value,
2362                                  MachineRepresentation::kFloat64);
2363 }
2364 
Node* CodeStubAssembler::StoreObjectField(
2366     Node* object, int offset, Node* value) {
2367   DCHECK_NE(HeapObject::kMapOffset, offset);  // Use StoreMap instead.
2368   return Store(object, IntPtrConstant(offset - kHeapObjectTag), value);
2369 }
2370 
Node* CodeStubAssembler::StoreObjectField(Node* object, Node* offset,
2372                                           Node* value) {
2373   int const_offset;
2374   if (ToInt32Constant(offset, const_offset)) {
2375     return StoreObjectField(object, const_offset, value);
2376   }
2377   return Store(object, IntPtrSub(offset, IntPtrConstant(kHeapObjectTag)),
2378                value);
2379 }
2380 
Node* CodeStubAssembler::StoreObjectFieldNoWriteBarrier(
2382     Node* object, int offset, Node* value, MachineRepresentation rep) {
2383   return StoreNoWriteBarrier(rep, object,
2384                              IntPtrConstant(offset - kHeapObjectTag), value);
2385 }
2386 
Node* CodeStubAssembler::StoreObjectFieldNoWriteBarrier(
2388     Node* object, Node* offset, Node* value, MachineRepresentation rep) {
2389   int const_offset;
2390   if (ToInt32Constant(offset, const_offset)) {
2391     return StoreObjectFieldNoWriteBarrier(object, const_offset, value, rep);
2392   }
2393   return StoreNoWriteBarrier(
2394       rep, object, IntPtrSub(offset, IntPtrConstant(kHeapObjectTag)), value);
2395 }
2396 
Node* CodeStubAssembler::StoreMap(Node* object, Node* map) {
2398   CSA_SLOW_ASSERT(this, IsMap(map));
2399   return StoreWithMapWriteBarrier(
2400       object, IntPtrConstant(HeapObject::kMapOffset - kHeapObjectTag), map);
2401 }
2402 
Node* CodeStubAssembler::StoreMapNoWriteBarrier(
2404     Node* object, Heap::RootListIndex map_root_index) {
2405   return StoreMapNoWriteBarrier(object, LoadRoot(map_root_index));
2406 }
2407 
Node* CodeStubAssembler::StoreMapNoWriteBarrier(Node* object, Node* map) {
2409   CSA_SLOW_ASSERT(this, IsMap(map));
2410   return StoreNoWriteBarrier(
2411       MachineRepresentation::kTagged, object,
2412       IntPtrConstant(HeapObject::kMapOffset - kHeapObjectTag), map);
2413 }
2414 
Node* CodeStubAssembler::StoreObjectFieldRoot(Node* object, int offset,
2416                                               Heap::RootListIndex root_index) {
2417   if (Heap::RootIsImmortalImmovable(root_index)) {
2418     return StoreObjectFieldNoWriteBarrier(object, offset, LoadRoot(root_index));
2419   } else {
2420     return StoreObjectField(object, offset, LoadRoot(root_index));
2421   }
2422 }
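// Rationale for the branch above: an immortal immovable root never lives in
// new space and never moves, so storing it requires no remembered-set or
// incremental-marking bookkeeping and the write barrier can be skipped; all
// other roots go through the barrier-emitting StoreObjectField path.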
2423 
Node* CodeStubAssembler::StoreJSArrayLength(TNode<JSArray> array,
2425                                             TNode<Smi> length) {
2426   return StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length);
2427 }
2428 
Node* CodeStubAssembler::StoreElements(TNode<Object> object,
2430                                        TNode<FixedArrayBase> elements) {
2431   return StoreObjectField(object, JSObject::kElementsOffset, elements);
2432 }
2433 
Node* CodeStubAssembler::StoreFixedArrayElement(Node* object, Node* index_node,
2435                                                 Node* value,
2436                                                 WriteBarrierMode barrier_mode,
2437                                                 int additional_offset,
2438                                                 ParameterMode parameter_mode) {
2439   CSA_SLOW_ASSERT(
2440       this,
2441       Word32Or(IsHashTable(object),
2442                Word32Or(IsFixedArray(object),
2443                         Word32Or(IsPropertyArray(object), IsContext(object)))));
2444   CSA_SLOW_ASSERT(this, MatchesParameterMode(index_node, parameter_mode));
2445   DCHECK(barrier_mode == SKIP_WRITE_BARRIER ||
2446          barrier_mode == UPDATE_WRITE_BARRIER);
2447   DCHECK_EQ(additional_offset % kPointerSize, 0);
2448   STATIC_ASSERT(FixedArray::kHeaderSize == PropertyArray::kHeaderSize);
2449   int header_size =
2450       FixedArray::kHeaderSize + additional_offset - kHeapObjectTag;
2451   Node* offset = ElementOffsetFromIndex(index_node, HOLEY_ELEMENTS,
2452                                         parameter_mode, header_size);
2453   // TODO(cbruni): Enable check once we have TNodes in this method. Currently
2454   // the bounds check will fail for PropertyArray due to the different length
2455   // encoding.
2456   // CSA_ASSERT(this,
2457   //         IsOffsetInBounds(offset, LoadAndUntagFixedArrayBaseLength(object),
2458   //                             FixedArray::kHeaderSize));
2459   if (barrier_mode == SKIP_WRITE_BARRIER) {
2460     return StoreNoWriteBarrier(MachineRepresentation::kTagged, object, offset,
2461                                value);
2462   } else {
2463     return Store(object, offset, value);
2464   }
2465 }
2466 
2467 Node* CodeStubAssembler::StoreFixedDoubleArrayElement(
2468     Node* object, Node* index_node, Node* value, ParameterMode parameter_mode) {
2469   CSA_ASSERT(this, IsFixedDoubleArray(object));
2470   CSA_SLOW_ASSERT(this, MatchesParameterMode(index_node, parameter_mode));
2471   Node* offset =
2472       ElementOffsetFromIndex(index_node, PACKED_DOUBLE_ELEMENTS, parameter_mode,
2473                              FixedArray::kHeaderSize - kHeapObjectTag);
2474   CSA_ASSERT(this, IsOffsetInBounds(
2475                        offset, LoadAndUntagFixedArrayBaseLength(object),
2476                        FixedDoubleArray::kHeaderSize, PACKED_DOUBLE_ELEMENTS));
2477   MachineRepresentation rep = MachineRepresentation::kFloat64;
2478   return StoreNoWriteBarrier(rep, object, offset, value);
2479 }
2480 
2481 Node* CodeStubAssembler::StoreFeedbackVectorSlot(Node* object,
2482                                                  Node* slot_index_node,
2483                                                  Node* value,
2484                                                  WriteBarrierMode barrier_mode,
2485                                                  int additional_offset,
2486                                                  ParameterMode parameter_mode) {
2487   CSA_SLOW_ASSERT(this, IsFeedbackVector(object));
2488   CSA_SLOW_ASSERT(this, MatchesParameterMode(slot_index_node, parameter_mode));
2489   DCHECK_EQ(additional_offset % kPointerSize, 0);
2490   DCHECK(barrier_mode == SKIP_WRITE_BARRIER ||
2491          barrier_mode == UPDATE_WRITE_BARRIER);
2492   int header_size =
2493       FeedbackVector::kFeedbackSlotsOffset + additional_offset - kHeapObjectTag;
2494   Node* offset = ElementOffsetFromIndex(slot_index_node, HOLEY_ELEMENTS,
2495                                         parameter_mode, header_size);
2496   // Check that slot_index_node is within the length of the feedback vector.
2497   CSA_ASSERT(this,
2498              IsOffsetInBounds(offset, LoadFeedbackVectorLength(CAST(object)),
2499                               FeedbackVector::kHeaderSize));
2500   if (barrier_mode == SKIP_WRITE_BARRIER) {
2501     return StoreNoWriteBarrier(MachineRepresentation::kTagged, object, offset,
2502                                value);
2503   } else {
2504     return Store(object, offset, value);
2505   }
2506 }
2507 
2508 void CodeStubAssembler::EnsureArrayLengthWritable(TNode<Map> map,
2509                                                   Label* bailout) {
2510   // Don't support arrays in dictionary named property mode.
2511   GotoIf(IsDictionaryMap(map), bailout);
2512 
2513   // Check whether the length property is writable. The length property is the
2514   // only default named property on arrays. It is nonconfigurable and hence
2515   // guaranteed to stay the first property.
2516   TNode<DescriptorArray> descriptors = LoadMapDescriptors(map);
2517 
2518   int length_index = JSArray::kLengthDescriptorIndex;
2519 #ifdef DEBUG
2520   TNode<Name> maybe_length = CAST(LoadFixedArrayElement(
2521       descriptors, DescriptorArray::ToKeyIndex(length_index)));
2522   CSA_ASSERT(this,
2523              WordEqual(maybe_length, LoadRoot(Heap::klength_stringRootIndex)));
2524 #endif
2525 
2526   TNode<Int32T> details = LoadAndUntagToWord32FixedArrayElement(
2527       descriptors, DescriptorArray::ToDetailsIndex(length_index));
2528   GotoIf(IsSetWord32(details, PropertyDetails::kAttributesReadOnlyMask),
2529          bailout);
2530 }
2531 
2532 TNode<Int32T> CodeStubAssembler::EnsureArrayPushable(TNode<Map> map,
2533                                                      Label* bailout) {
2534   // Disallow pushing onto prototypes. It might be the JSArray prototype.
2535   // Disallow pushing onto non-extensible objects.
2536   Comment("Disallow pushing onto prototypes");
2537   Node* bit_field2 = LoadMapBitField2(map);
2538   int mask = Map::IsPrototypeMapBit::kMask | Map::IsExtensibleBit::kMask;
2539   Node* test = Word32And(bit_field2, Int32Constant(mask));
2540   GotoIf(Word32NotEqual(test, Int32Constant(Map::IsExtensibleBit::kMask)),
2541          bailout);
2542 
2543   EnsureArrayLengthWritable(map, bailout);
2544 
2545   TNode<Uint32T> kind = DecodeWord32<Map::ElementsKindBits>(bit_field2);
2546   return Signed(kind);
2547 }
2548 
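// Grows the elements backing store of |array| when |length| + |growth| would
// exceed the current capacity of |var_elements|. On return, |var_elements| is
// bound to the (possibly reallocated) elements; |bailout| is taken if growing
// the capacity fails.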
2549 void CodeStubAssembler::PossiblyGrowElementsCapacity(
2550     ParameterMode mode, ElementsKind kind, Node* array, Node* length,
2551     Variable* var_elements, Node* growth, Label* bailout) {
2552   Label fits(this, var_elements);
2553   Node* capacity =
2554       TaggedToParameter(LoadFixedArrayBaseLength(var_elements->value()), mode);
2555   // The |length| and |growth| nodes are already in a representation
2556   // appropriate for the given ParameterMode.
2557   Node* new_length = IntPtrOrSmiAdd(growth, length, mode);
2558   GotoIfNot(IntPtrOrSmiGreaterThan(new_length, capacity, mode), &fits);
2559   Node* new_capacity = CalculateNewElementsCapacity(new_length, mode);
2560   var_elements->Bind(GrowElementsCapacity(array, var_elements->value(), kind,
2561                                           kind, capacity, new_capacity, mode,
2562                                           bailout));
2563   Goto(&fits);
2564   BIND(&fits);
2565 }
2566 
2567 TNode<Smi> CodeStubAssembler::BuildAppendJSArray(ElementsKind kind,
2568                                                  SloppyTNode<JSArray> array,
2569                                                  CodeStubArguments* args,
2570                                                  TVariable<IntPtrT>* arg_index,
2571                                                  Label* bailout) {
2572   CSA_SLOW_ASSERT(this, IsJSArray(array));
2573   Comment("BuildAppendJSArray: %s", ElementsKindToString(kind));
2574   Label pre_bailout(this);
2575   Label success(this);
2576   TVARIABLE(Smi, var_tagged_length);
2577   ParameterMode mode = OptimalParameterMode();
2578   VARIABLE(var_length, OptimalParameterRepresentation(),
2579            TaggedToParameter(LoadFastJSArrayLength(array), mode));
2580   VARIABLE(var_elements, MachineRepresentation::kTagged, LoadElements(array));
2581 
2582   // Resize the capacity of the fixed array if it doesn't fit.
2583   TNode<IntPtrT> first = arg_index->value();
2584   Node* growth = IntPtrToParameter(
2585       IntPtrSub(UncheckedCast<IntPtrT>(args->GetLength(INTPTR_PARAMETERS)),
2586                 first),
2587       mode);
2588   PossiblyGrowElementsCapacity(mode, kind, array, var_length.value(),
2589                                &var_elements, growth, &pre_bailout);
2590 
2591   // Push each argument onto the end of the array now that there is enough
2592   // capacity.
2593   CodeStubAssembler::VariableList push_vars({&var_length}, zone());
2594   Node* elements = var_elements.value();
2595   args->ForEach(
2596       push_vars,
2597       [this, kind, mode, elements, &var_length, &pre_bailout](Node* arg) {
2598         TryStoreArrayElement(kind, mode, &pre_bailout, elements,
2599                              var_length.value(), arg);
2600         Increment(&var_length, 1, mode);
2601       },
2602       first, nullptr);
2603   {
2604     TNode<Smi> length = ParameterToTagged(var_length.value(), mode);
2605     var_tagged_length = length;
2606     StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length);
2607     Goto(&success);
2608   }
2609 
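  // A failed element store jumps here: the array length is updated to cover
  // only the elements that were actually pushed, and |arg_index| is advanced
  // accordingly before taking the caller's bailout.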
2610   BIND(&pre_bailout);
2611   {
2612     TNode<Smi> length = ParameterToTagged(var_length.value(), mode);
2613     var_tagged_length = length;
2614     Node* diff = SmiSub(length, LoadFastJSArrayLength(array));
2615     StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length);
2616     *arg_index = IntPtrAdd(arg_index->value(), SmiUntag(diff));
2617     Goto(bailout);
2618   }
2619 
2620   BIND(&success);
2621   return var_tagged_length.value();
2622 }
2623 
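// Stores |value| into |elements| at |index|, jumping to |bailout| when the
// value is incompatible with |kind| (a non-Smi for Smi kinds, a non-number
// for double kinds).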
2624 void CodeStubAssembler::TryStoreArrayElement(ElementsKind kind,
2625                                              ParameterMode mode, Label* bailout,
2626                                              Node* elements, Node* index,
2627                                              Node* value) {
2628   if (IsSmiElementsKind(kind)) {
2629     GotoIf(TaggedIsNotSmi(value), bailout);
2630   } else if (IsDoubleElementsKind(kind)) {
2631     GotoIfNotNumber(value, bailout);
2632   }
2633   if (IsDoubleElementsKind(kind)) value = ChangeNumberToFloat64(value);
2634   StoreElement(elements, kind, index, value, mode);
2635 }
2636 
2637 void CodeStubAssembler::BuildAppendJSArray(ElementsKind kind, Node* array,
2638                                            Node* value, Label* bailout) {
2639   CSA_SLOW_ASSERT(this, IsJSArray(array));
2640   Comment("BuildAppendJSArray: %s", ElementsKindToString(kind));
2641   ParameterMode mode = OptimalParameterMode();
2642   VARIABLE(var_length, OptimalParameterRepresentation(),
2643            TaggedToParameter(LoadFastJSArrayLength(array), mode));
2644   VARIABLE(var_elements, MachineRepresentation::kTagged, LoadElements(array));
2645 
2646   // Resize the capacity of the fixed array if it doesn't fit.
2647   Node* growth = IntPtrOrSmiConstant(1, mode);
2648   PossiblyGrowElementsCapacity(mode, kind, array, var_length.value(),
2649                                &var_elements, growth, bailout);
2650 
2651   // Push the single value onto the end of the array now that there is
2652   // enough capacity.
2653   TryStoreArrayElement(kind, mode, bailout, var_elements.value(),
2654                        var_length.value(), value);
2655   Increment(&var_length, 1, mode);
2656 
2657   Node* length = ParameterToTagged(var_length.value(), mode);
2658   StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length);
2659 }
2660 
2661 Node* CodeStubAssembler::AllocateCellWithValue(Node* value,
2662                                                WriteBarrierMode mode) {
2663   Node* result = Allocate(Cell::kSize, kNone);
2664   StoreMapNoWriteBarrier(result, Heap::kCellMapRootIndex);
2665   StoreCellValue(result, value, mode);
2666   return result;
2667 }
2668 
2669 Node* CodeStubAssembler::LoadCellValue(Node* cell) {
2670   CSA_SLOW_ASSERT(this, HasInstanceType(cell, CELL_TYPE));
2671   return LoadObjectField(cell, Cell::kValueOffset);
2672 }
2673 
2674 Node* CodeStubAssembler::StoreCellValue(Node* cell, Node* value,
2675                                         WriteBarrierMode mode) {
2676   CSA_SLOW_ASSERT(this, HasInstanceType(cell, CELL_TYPE));
2677   DCHECK(mode == SKIP_WRITE_BARRIER || mode == UPDATE_WRITE_BARRIER);
2678 
2679   if (mode == UPDATE_WRITE_BARRIER) {
2680     return StoreObjectField(cell, Cell::kValueOffset, value);
2681   } else {
2682     return StoreObjectFieldNoWriteBarrier(cell, Cell::kValueOffset, value);
2683   }
2684 }
2685 
2686 TNode<HeapNumber> CodeStubAssembler::AllocateHeapNumber(MutableMode mode) {
2687   Node* result = Allocate(HeapNumber::kSize, kNone);
2688   Heap::RootListIndex heap_map_index =
2689       mode == IMMUTABLE ? Heap::kHeapNumberMapRootIndex
2690                         : Heap::kMutableHeapNumberMapRootIndex;
2691   StoreMapNoWriteBarrier(result, heap_map_index);
2692   return UncheckedCast<HeapNumber>(result);
2693 }
2694 
2695 TNode<HeapNumber> CodeStubAssembler::AllocateHeapNumberWithValue(
2696     SloppyTNode<Float64T> value, MutableMode mode) {
2697   TNode<HeapNumber> result = AllocateHeapNumber(mode);
2698   StoreHeapNumberValue(result, value);
2699   return result;
2700 }
2701 
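// Allocates a BigInt with room for |length| digits and writes |length| into
// the length bits of its bitfield; the sign bit stays zero and the digits
// themselves are not initialized.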
2702 TNode<BigInt> CodeStubAssembler::AllocateBigInt(TNode<IntPtrT> length) {
2703   TNode<BigInt> result = AllocateRawBigInt(length);
2704   StoreBigIntBitfield(result, WordShl(length, BigInt::LengthBits::kShift));
2705   return result;
2706 }
2707 
2708 TNode<BigInt> CodeStubAssembler::AllocateRawBigInt(TNode<IntPtrT> length) {
2709   // This is currently used only for 64-bit wide BigInts. If more general
2710   // applicability is required, a large-object check must be added.
2711   CSA_ASSERT(this, UintPtrLessThan(length, IntPtrConstant(3)));
2712 
2713   TNode<IntPtrT> size = IntPtrAdd(IntPtrConstant(BigInt::kHeaderSize),
2714                                   Signed(WordShl(length, kPointerSizeLog2)));
2715   Node* raw_result = Allocate(size, kNone);
2716   StoreMapNoWriteBarrier(raw_result, Heap::kBigIntMapRootIndex);
2717   return UncheckedCast<BigInt>(raw_result);
2718 }
2719 
2720 void CodeStubAssembler::StoreBigIntBitfield(TNode<BigInt> bigint,
2721                                             TNode<WordT> bitfield) {
2722   StoreObjectFieldNoWriteBarrier(bigint, BigInt::kBitfieldOffset, bitfield,
2723                                  MachineType::PointerRepresentation());
2724 }
2725 
2726 void CodeStubAssembler::StoreBigIntDigit(TNode<BigInt> bigint, int digit_index,
2727                                          TNode<UintPtrT> digit) {
2728   StoreObjectFieldNoWriteBarrier(
2729       bigint, BigInt::kDigitsOffset + digit_index * kPointerSize, digit,
2730       UintPtrT::kMachineRepresentation);
2731 }
2732 
2733 TNode<WordT> CodeStubAssembler::LoadBigIntBitfield(TNode<BigInt> bigint) {
2734   return UncheckedCast<WordT>(
2735       LoadObjectField(bigint, BigInt::kBitfieldOffset, MachineType::UintPtr()));
2736 }
2737 
2738 TNode<UintPtrT> CodeStubAssembler::LoadBigIntDigit(TNode<BigInt> bigint,
2739                                                    int digit_index) {
2740   return UncheckedCast<UintPtrT>(LoadObjectField(
2741       bigint, BigInt::kDigitsOffset + digit_index * kPointerSize,
2742       MachineType::UintPtr()));
2743 }
2744 
2745 TNode<String> CodeStubAssembler::AllocateSeqOneByteString(
2746     int length, AllocationFlags flags) {
2747   Comment("AllocateSeqOneByteString");
2748   if (length == 0) {
2749     return CAST(LoadRoot(Heap::kempty_stringRootIndex));
2750   }
2751   Node* result = Allocate(SeqOneByteString::SizeFor(length), flags);
2752   DCHECK(Heap::RootIsImmortalImmovable(Heap::kOneByteStringMapRootIndex));
2753   StoreMapNoWriteBarrier(result, Heap::kOneByteStringMapRootIndex);
2754   StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kLengthOffset,
2755                                  SmiConstant(length),
2756                                  MachineRepresentation::kTagged);
2757   StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kHashFieldSlot,
2758                                  IntPtrConstant(String::kEmptyHashField),
2759                                  MachineType::PointerRepresentation());
2760   return CAST(result);
2761 }
2762 
2763 TNode<BoolT> CodeStubAssembler::IsZeroOrContext(SloppyTNode<Object> object) {
2764   return Select<BoolT>(WordEqual(object, SmiConstant(0)),
2765                        [=] { return Int32TrueConstant(); },
2766                        [=] { return IsContext(CAST(object)); });
2767 }
2768 
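// Variable-length variant: a zero length returns the canonical empty string,
// small strings are allocated directly in new space, and strings whose size
// exceeds kMaxRegularHeapObjectSize fall back to the runtime so they can be
// placed in large-object space.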
2769 TNode<String> CodeStubAssembler::AllocateSeqOneByteString(
2770     Node* context, TNode<Smi> length, AllocationFlags flags) {
2771   Comment("AllocateSeqOneByteString");
2772   CSA_SLOW_ASSERT(this, IsZeroOrContext(context));
2773   VARIABLE(var_result, MachineRepresentation::kTagged);
2774 
2775   // Compute the SeqOneByteString size and check if it fits into new space.
2776   Label if_lengthiszero(this), if_sizeissmall(this),
2777       if_notsizeissmall(this, Label::kDeferred), if_join(this);
2778   GotoIf(SmiEqual(length, SmiConstant(0)), &if_lengthiszero);
2779 
2780   Node* raw_size = GetArrayAllocationSize(
2781       SmiUntag(length), UINT8_ELEMENTS, INTPTR_PARAMETERS,
2782       SeqOneByteString::kHeaderSize + kObjectAlignmentMask);
2783   Node* size = WordAnd(raw_size, IntPtrConstant(~kObjectAlignmentMask));
2784   Branch(IntPtrLessThanOrEqual(size, IntPtrConstant(kMaxRegularHeapObjectSize)),
2785          &if_sizeissmall, &if_notsizeissmall);
2786 
2787   BIND(&if_sizeissmall);
2788   {
2789     // Just allocate the SeqOneByteString in new space.
2790     Node* result = AllocateInNewSpace(size, flags);
2791     DCHECK(Heap::RootIsImmortalImmovable(Heap::kOneByteStringMapRootIndex));
2792     StoreMapNoWriteBarrier(result, Heap::kOneByteStringMapRootIndex);
2793     StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kLengthOffset,
2794                                    length, MachineRepresentation::kTagged);
2795     StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kHashFieldSlot,
2796                                    IntPtrConstant(String::kEmptyHashField),
2797                                    MachineType::PointerRepresentation());
2798     var_result.Bind(result);
2799     Goto(&if_join);
2800   }
2801 
2802   BIND(&if_notsizeissmall);
2803   {
2804     // We might need to allocate in large object space, go to the runtime.
2805     Node* result =
2806         CallRuntime(Runtime::kAllocateSeqOneByteString, context, length);
2807     var_result.Bind(result);
2808     Goto(&if_join);
2809   }
2810 
2811   BIND(&if_lengthiszero);
2812   {
2813     var_result.Bind(LoadRoot(Heap::kempty_stringRootIndex));
2814     Goto(&if_join);
2815   }
2816 
2817   BIND(&if_join);
2818   return CAST(var_result.value());
2819 }
2820 
2821 TNode<String> CodeStubAssembler::AllocateSeqTwoByteString(
2822     int length, AllocationFlags flags) {
2823   Comment("AllocateSeqTwoByteString");
2824   if (length == 0) {
2825     return CAST(LoadRoot(Heap::kempty_stringRootIndex));
2826   }
2827   Node* result = Allocate(SeqTwoByteString::SizeFor(length), flags);
2828   DCHECK(Heap::RootIsImmortalImmovable(Heap::kStringMapRootIndex));
2829   StoreMapNoWriteBarrier(result, Heap::kStringMapRootIndex);
2830   StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kLengthOffset,
2831                                  SmiConstant(Smi::FromInt(length)),
2832                                  MachineRepresentation::kTagged);
2833   StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kHashFieldSlot,
2834                                  IntPtrConstant(String::kEmptyHashField),
2835                                  MachineType::PointerRepresentation());
2836   return CAST(result);
2837 }
2838 
2839 TNode<String> CodeStubAssembler::AllocateSeqTwoByteString(
2840     Node* context, TNode<Smi> length, AllocationFlags flags) {
2841   CSA_SLOW_ASSERT(this, IsZeroOrContext(context));
2842   Comment("AllocateSeqTwoByteString");
2843   VARIABLE(var_result, MachineRepresentation::kTagged);
2844 
2845   // Compute the SeqTwoByteString size and check if it fits into new space.
2846   Label if_lengthiszero(this), if_sizeissmall(this),
2847       if_notsizeissmall(this, Label::kDeferred), if_join(this);
2848   GotoIf(SmiEqual(length, SmiConstant(0)), &if_lengthiszero);
2849 
2850   Node* raw_size = GetArrayAllocationSize(
2851       SmiUntag(length), UINT16_ELEMENTS, INTPTR_PARAMETERS,
2852       SeqOneByteString::kHeaderSize + kObjectAlignmentMask);
2853   Node* size = WordAnd(raw_size, IntPtrConstant(~kObjectAlignmentMask));
2854   Branch(IntPtrLessThanOrEqual(size, IntPtrConstant(kMaxRegularHeapObjectSize)),
2855          &if_sizeissmall, &if_notsizeissmall);
2856 
2857   BIND(&if_sizeissmall);
2858   {
2859     // Just allocate the SeqTwoByteString in new space.
2860     Node* result = AllocateInNewSpace(size, flags);
2861     DCHECK(Heap::RootIsImmortalImmovable(Heap::kStringMapRootIndex));
2862     StoreMapNoWriteBarrier(result, Heap::kStringMapRootIndex);
2863     StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kLengthOffset,
2864                                    length, MachineRepresentation::kTagged);
2865     StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kHashFieldSlot,
2866                                    IntPtrConstant(String::kEmptyHashField),
2867                                    MachineType::PointerRepresentation());
2868     var_result.Bind(result);
2869     Goto(&if_join);
2870   }
2871 
2872   BIND(&if_notsizeissmall);
2873   {
2874     // We might need to allocate in large object space, go to the runtime.
2875     Node* result =
2876         CallRuntime(Runtime::kAllocateSeqTwoByteString, context, length);
2877     var_result.Bind(result);
2878     Goto(&if_join);
2879   }
2880 
2881   BIND(&if_lengthiszero);
2882   {
2883     var_result.Bind(LoadRoot(Heap::kempty_stringRootIndex));
2884     Goto(&if_join);
2885   }
2886 
2887   BIND(&if_join);
2888   return CAST(var_result.value());
2889 }
2890 
2891 TNode<String> CodeStubAssembler::AllocateSlicedString(
2892     Heap::RootListIndex map_root_index, TNode<Smi> length, TNode<String> parent,
2893     TNode<Smi> offset) {
2894   DCHECK(map_root_index == Heap::kSlicedOneByteStringMapRootIndex ||
2895          map_root_index == Heap::kSlicedStringMapRootIndex);
2896   Node* result = Allocate(SlicedString::kSize);
2897   DCHECK(Heap::RootIsImmortalImmovable(map_root_index));
2898   StoreMapNoWriteBarrier(result, map_root_index);
2899   StoreObjectFieldNoWriteBarrier(result, SlicedString::kLengthOffset, length,
2900                                  MachineRepresentation::kTagged);
2901   StoreObjectFieldNoWriteBarrier(result, SlicedString::kHashFieldSlot,
2902                                  IntPtrConstant(String::kEmptyHashField),
2903                                  MachineType::PointerRepresentation());
2904   StoreObjectFieldNoWriteBarrier(result, SlicedString::kParentOffset, parent,
2905                                  MachineRepresentation::kTagged);
2906   StoreObjectFieldNoWriteBarrier(result, SlicedString::kOffsetOffset, offset,
2907                                  MachineRepresentation::kTagged);
2908   return CAST(result);
2909 }
2910 
2911 TNode<String> CodeStubAssembler::AllocateSlicedOneByteString(
2912     TNode<Smi> length, TNode<String> parent, TNode<Smi> offset) {
2913   return AllocateSlicedString(Heap::kSlicedOneByteStringMapRootIndex, length,
2914                               parent, offset);
2915 }
2916 
2917 TNode<String> CodeStubAssembler::AllocateSlicedTwoByteString(
2918     TNode<Smi> length, TNode<String> parent, TNode<Smi> offset) {
2919   return AllocateSlicedString(Heap::kSlicedStringMapRootIndex, length, parent,
2920                               offset);
2921 }
2922 
2923 TNode<String> CodeStubAssembler::AllocateConsString(
2924     Heap::RootListIndex map_root_index, TNode<Smi> length, TNode<String> first,
2925     TNode<String> second, AllocationFlags flags) {
2926   DCHECK(map_root_index == Heap::kConsOneByteStringMapRootIndex ||
2927          map_root_index == Heap::kConsStringMapRootIndex);
2928   Node* result = Allocate(ConsString::kSize, flags);
2929   DCHECK(Heap::RootIsImmortalImmovable(map_root_index));
2930   StoreMapNoWriteBarrier(result, map_root_index);
2931   StoreObjectFieldNoWriteBarrier(result, ConsString::kLengthOffset, length,
2932                                  MachineRepresentation::kTagged);
2933   StoreObjectFieldNoWriteBarrier(result, ConsString::kHashFieldSlot,
2934                                  IntPtrConstant(String::kEmptyHashField),
2935                                  MachineType::PointerRepresentation());
2936   bool const new_space = !(flags & kPretenured);
2937   if (new_space) {
2938     StoreObjectFieldNoWriteBarrier(result, ConsString::kFirstOffset, first,
2939                                    MachineRepresentation::kTagged);
2940     StoreObjectFieldNoWriteBarrier(result, ConsString::kSecondOffset, second,
2941                                    MachineRepresentation::kTagged);
2942   } else {
2943     StoreObjectField(result, ConsString::kFirstOffset, first);
2944     StoreObjectField(result, ConsString::kSecondOffset, second);
2945   }
2946   return CAST(result);
2947 }
2948 
2949 TNode<String> CodeStubAssembler::AllocateOneByteConsString(
2950     TNode<Smi> length, TNode<String> first, TNode<String> second,
2951     AllocationFlags flags) {
2952   return AllocateConsString(Heap::kConsOneByteStringMapRootIndex, length, first,
2953                             second, flags);
2954 }
2955 
2956 TNode<String> CodeStubAssembler::AllocateTwoByteConsString(
2957     TNode<Smi> length, TNode<String> first, TNode<String> second,
2958     AllocationFlags flags) {
2959   return AllocateConsString(Heap::kConsStringMapRootIndex, length, first,
2960                             second, flags);
2961 }
2962 
2963 TNode<String> CodeStubAssembler::NewConsString(Node* context, TNode<Smi> length,
2964                                                TNode<String> left,
2965                                                TNode<String> right,
2966                                                AllocationFlags flags) {
2967   CSA_ASSERT(this, IsContext(context));
2968   // The resulting string is a cons string.
2969   Comment("Allocating ConsString");
2970   Node* left_instance_type = LoadInstanceType(left);
2971   Node* right_instance_type = LoadInstanceType(right);
2972 
2973   // Compute intersection and difference of instance types.
2974   Node* anded_instance_types =
2975       Word32And(left_instance_type, right_instance_type);
2976   Node* xored_instance_types =
2977       Word32Xor(left_instance_type, right_instance_type);
2978 
2979   // We create a one-byte cons string if
2980   // 1. both strings are one-byte, or
2981   // 2. at least one of the strings is two-byte, but happens to contain only
2982   //    one-byte characters.
2983   // To do this, we check
2984   // 1. if both strings are one-byte, or if the one-byte data hint is set in
2985   //    both strings, or
2986   // 2. if one of the strings has the one-byte data hint set and the other
2987   //    string is one-byte.
2988   STATIC_ASSERT(kOneByteStringTag != 0);
2989   STATIC_ASSERT(kOneByteDataHintTag != 0);
2990   Label one_byte_map(this);
2991   Label two_byte_map(this);
2992   TVARIABLE(String, result);
2993   Label done(this, &result);
2994   GotoIf(IsSetWord32(anded_instance_types,
2995                      kStringEncodingMask | kOneByteDataHintTag),
2996          &one_byte_map);
2997   Branch(Word32NotEqual(Word32And(xored_instance_types,
2998                                   Int32Constant(kStringEncodingMask |
2999                                                 kOneByteDataHintMask)),
3000                         Int32Constant(kOneByteStringTag | kOneByteDataHintTag)),
3001          &two_byte_map, &one_byte_map);
3002 
3003   BIND(&one_byte_map);
3004   Comment("One-byte ConsString");
3005   result = AllocateOneByteConsString(length, left, right, flags);
3006   Goto(&done);
3007 
3008   BIND(&two_byte_map);
3009   Comment("Two-byte ConsString");
3010   result = AllocateTwoByteConsString(length, left, right, flags);
3011   Goto(&done);
3012 
3013   BIND(&done);
3014 
3015   return result.value();
3016 }
3017 
3018 TNode<NameDictionary> CodeStubAssembler::AllocateNameDictionary(
3019     int at_least_space_for) {
3020   return AllocateNameDictionary(IntPtrConstant(at_least_space_for));
3021 }
3022 
3023 TNode<NameDictionary> CodeStubAssembler::AllocateNameDictionary(
3024     TNode<IntPtrT> at_least_space_for) {
3025   CSA_ASSERT(this, UintPtrLessThanOrEqual(
3026                        at_least_space_for,
3027                        IntPtrConstant(NameDictionary::kMaxCapacity)));
3028   TNode<IntPtrT> capacity = HashTableComputeCapacity(at_least_space_for);
3029   return AllocateNameDictionaryWithCapacity(capacity);
3030 }
3031 
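// Allocates a NameDictionary with exactly |capacity| (a power of two) entries
// in new space, initializes the FixedArray, HashTable, and Dictionary header
// fields, and fills the element area with undefined.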
3032 TNode<NameDictionary> CodeStubAssembler::AllocateNameDictionaryWithCapacity(
3033     TNode<IntPtrT> capacity) {
3034   CSA_ASSERT(this, WordIsPowerOfTwo(capacity));
3035   CSA_ASSERT(this, IntPtrGreaterThan(capacity, IntPtrConstant(0)));
3036   TNode<IntPtrT> length = EntryToIndex<NameDictionary>(capacity);
3037   TNode<WordT> store_size = IntPtrAdd(
3038       TimesPointerSize(length), IntPtrConstant(NameDictionary::kHeaderSize));
3039 
3040   Node* result = AllocateInNewSpace(store_size);
3041   Comment("Initialize NameDictionary");
3042   // Initialize FixedArray fields.
3043   DCHECK(Heap::RootIsImmortalImmovable(Heap::kNameDictionaryMapRootIndex));
3044   StoreMapNoWriteBarrier(result, Heap::kNameDictionaryMapRootIndex);
3045   StoreObjectFieldNoWriteBarrier(result, FixedArray::kLengthOffset,
3046                                  SmiFromIntPtr(length));
3047   // Initialize HashTable fields.
3048   TNode<Smi> zero = SmiConstant(0);
3049   StoreFixedArrayElement(result, NameDictionary::kNumberOfElementsIndex, zero,
3050                          SKIP_WRITE_BARRIER);
3051   StoreFixedArrayElement(result, NameDictionary::kNumberOfDeletedElementsIndex,
3052                          zero, SKIP_WRITE_BARRIER);
3053   StoreFixedArrayElement(result, NameDictionary::kCapacityIndex,
3054                          SmiTag(capacity), SKIP_WRITE_BARRIER);
3055   // Initialize Dictionary fields.
3056   TNode<HeapObject> filler = UndefinedConstant();
3057   StoreFixedArrayElement(result, NameDictionary::kNextEnumerationIndexIndex,
3058                          SmiConstant(PropertyDetails::kInitialIndex),
3059                          SKIP_WRITE_BARRIER);
3060   StoreFixedArrayElement(result, NameDictionary::kObjectHashIndex,
3061                          SmiConstant(PropertyArray::kNoHashSentinel),
3062                          SKIP_WRITE_BARRIER);
3063 
3064   // Initialize NameDictionary elements.
3065   TNode<WordT> result_word = BitcastTaggedToWord(result);
3066   TNode<WordT> start_address = IntPtrAdd(
3067       result_word, IntPtrConstant(NameDictionary::OffsetOfElementAt(
3068                                       NameDictionary::kElementsStartIndex) -
3069                                   kHeapObjectTag));
3070   TNode<WordT> end_address = IntPtrAdd(
3071       result_word, IntPtrSub(store_size, IntPtrConstant(kHeapObjectTag)));
3072   StoreFieldsNoWriteBarrier(start_address, end_address, filler);
3073   return CAST(result);
3074 }
3075 
3076 TNode<NameDictionary> CodeStubAssembler::CopyNameDictionary(
3077     TNode<NameDictionary> dictionary, Label* large_object_fallback) {
3078   Comment("Copy boilerplate property dict");
3079   TNode<IntPtrT> capacity = SmiUntag(GetCapacity<NameDictionary>(dictionary));
3080   CSA_ASSERT(this, IntPtrGreaterThanOrEqual(capacity, IntPtrConstant(0)));
3081   GotoIf(UintPtrGreaterThan(
3082              capacity, IntPtrConstant(NameDictionary::kMaxRegularCapacity)),
3083          large_object_fallback);
3084   TNode<NameDictionary> properties =
3085       AllocateNameDictionaryWithCapacity(capacity);
3086   TNode<IntPtrT> length = SmiUntag(LoadFixedArrayBaseLength(dictionary));
3087   CopyFixedArrayElements(PACKED_ELEMENTS, dictionary, properties, length,
3088                          SKIP_WRITE_BARRIER, INTPTR_PARAMETERS);
3089   return properties;
3090 }
3091 
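// Allocates an empty OrderedHashMap or OrderedHashSet of minimal capacity.
// The backing FixedArray is laid out as: header fields (element counts and
// bucket count), then kBucketCount bucket heads (initialized to kNotFound),
// then the data table (initialized to undefined).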
3092 template <typename CollectionType>
3093 Node* CodeStubAssembler::AllocateOrderedHashTable() {
3094   static const int kCapacity = CollectionType::kMinCapacity;
3095   static const int kBucketCount = kCapacity / CollectionType::kLoadFactor;
3096   static const int kDataTableLength = kCapacity * CollectionType::kEntrySize;
3097   static const int kFixedArrayLength =
3098       CollectionType::kHashTableStartIndex + kBucketCount + kDataTableLength;
3099   static const int kDataTableStartIndex =
3100       CollectionType::kHashTableStartIndex + kBucketCount;
3101 
3102   STATIC_ASSERT(base::bits::IsPowerOfTwo(kCapacity));
3103   STATIC_ASSERT(kCapacity <= CollectionType::kMaxCapacity);
3104 
3105   // Allocate the table and add the proper map.
3106   const ElementsKind elements_kind = HOLEY_ELEMENTS;
3107   Node* const length_intptr = IntPtrConstant(kFixedArrayLength);
3108   Node* const fixed_array_map = LoadRoot(
3109       static_cast<Heap::RootListIndex>(CollectionType::GetMapRootIndex()));
3110   Node* const table =
3111       AllocateFixedArray(elements_kind, length_intptr, INTPTR_PARAMETERS,
3112                          kAllowLargeObjectAllocation, fixed_array_map);
3113 
3114   // Initialize the OrderedHashTable fields.
3115   const WriteBarrierMode barrier_mode = SKIP_WRITE_BARRIER;
3116   StoreFixedArrayElement(table, CollectionType::kNumberOfElementsIndex,
3117                          SmiConstant(0), barrier_mode);
3118   StoreFixedArrayElement(table, CollectionType::kNumberOfDeletedElementsIndex,
3119                          SmiConstant(0), barrier_mode);
3120   StoreFixedArrayElement(table, CollectionType::kNumberOfBucketsIndex,
3121                          SmiConstant(kBucketCount), barrier_mode);
3122 
3123   // Fill the buckets with kNotFound.
3124   Node* const not_found = SmiConstant(CollectionType::kNotFound);
3125   STATIC_ASSERT(CollectionType::kHashTableStartIndex ==
3126                 CollectionType::kNumberOfBucketsIndex + 1);
3127   STATIC_ASSERT((CollectionType::kHashTableStartIndex + kBucketCount) ==
3128                 kDataTableStartIndex);
3129   for (int i = 0; i < kBucketCount; i++) {
3130     StoreFixedArrayElement(table, CollectionType::kHashTableStartIndex + i,
3131                            not_found, barrier_mode);
3132   }
3133 
3134   // Fill the data table with undefined.
3135   STATIC_ASSERT(kDataTableStartIndex + kDataTableLength == kFixedArrayLength);
3136   for (int i = 0; i < kDataTableLength; i++) {
3137     StoreFixedArrayElement(table, kDataTableStartIndex + i, UndefinedConstant(),
3138                            barrier_mode);
3139   }
3140 
3141   return table;
3142 }
3143 
3144 template Node* CodeStubAssembler::AllocateOrderedHashTable<OrderedHashMap>();
3145 template Node* CodeStubAssembler::AllocateOrderedHashTable<OrderedHashSet>();
3146 
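// Finds the entry for |hash| by walking the chain of the corresponding bucket
// and invoking |key_compare| on each candidate key. On a match, binds
// |entry_start_position| to the entry's index relative to kHashTableStartIndex
// and jumps to |entry_found|; otherwise jumps to |not_found|.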
3147 template <typename CollectionType>
3148 void CodeStubAssembler::FindOrderedHashTableEntry(
3149     Node* table, Node* hash,
3150     std::function<void(Node*, Label*, Label*)> key_compare,
3151     Variable* entry_start_position, Label* entry_found, Label* not_found) {
3152   // Get the index of the bucket.
3153   Node* const number_of_buckets = SmiUntag(CAST(
3154       LoadFixedArrayElement(table, CollectionType::kNumberOfBucketsIndex)));
3155   Node* const bucket =
3156       WordAnd(hash, IntPtrSub(number_of_buckets, IntPtrConstant(1)));
3157   Node* const first_entry = SmiUntag(CAST(LoadFixedArrayElement(
3158       table, bucket, CollectionType::kHashTableStartIndex * kPointerSize)));
3159 
3160   // Walk the bucket chain.
3161   Node* entry_start;
3162   Label if_key_found(this);
3163   {
3164     VARIABLE(var_entry, MachineType::PointerRepresentation(), first_entry);
3165     Label loop(this, {&var_entry, entry_start_position}),
3166         continue_next_entry(this);
3167     Goto(&loop);
3168     BIND(&loop);
3169 
3170     // If the entry index is the not-found sentinel, we are done.
3171     GotoIf(
3172         WordEqual(var_entry.value(), IntPtrConstant(CollectionType::kNotFound)),
3173         not_found);
3174 
3175     // Make sure the entry index is within range.
3176     CSA_ASSERT(
3177         this,
3178         UintPtrLessThan(
3179             var_entry.value(),
3180             SmiUntag(SmiAdd(
3181                 CAST(LoadFixedArrayElement(
3182                     table, CollectionType::kNumberOfElementsIndex)),
3183                 CAST(LoadFixedArrayElement(
3184                     table, CollectionType::kNumberOfDeletedElementsIndex))))));
3185 
3186     // Compute the index of the entry relative to kHashTableStartIndex.
3187     entry_start =
3188         IntPtrAdd(IntPtrMul(var_entry.value(),
3189                             IntPtrConstant(CollectionType::kEntrySize)),
3190                   number_of_buckets);
3191 
3192     // Load the key from the entry.
3193     Node* const candidate_key = LoadFixedArrayElement(
3194         table, entry_start,
3195         CollectionType::kHashTableStartIndex * kPointerSize);
3196 
3197     key_compare(candidate_key, &if_key_found, &continue_next_entry);
3198 
3199     BIND(&continue_next_entry);
3200     // Load the index of the next entry in the bucket chain.
3201     var_entry.Bind(SmiUntag(CAST(LoadFixedArrayElement(
3202         table, entry_start,
3203         (CollectionType::kHashTableStartIndex + CollectionType::kChainOffset) *
3204             kPointerSize))));
3205 
3206     Goto(&loop);
3207   }
3208 
3209   BIND(&if_key_found);
3210   entry_start_position->Bind(entry_start);
3211   Goto(entry_found);
3212 }
3213 
3214 template void CodeStubAssembler::FindOrderedHashTableEntry<OrderedHashMap>(
3215     Node* table, Node* hash,
3216     std::function<void(Node*, Label*, Label*)> key_compare,
3217     Variable* entry_start_position, Label* entry_found, Label* not_found);
3218 template void CodeStubAssembler::FindOrderedHashTableEntry<OrderedHashSet>(
3219     Node* table, Node* hash,
3220     std::function<void(Node*, Label*, Label*)> key_compare,
3221     Variable* entry_start_position, Label* entry_found, Label* not_found);
3222 
3223 Node* CodeStubAssembler::AllocateStruct(Node* map, AllocationFlags flags) {
3224   Comment("AllocateStruct");
3225   CSA_ASSERT(this, IsMap(map));
3226   Node* size = TimesPointerSize(LoadMapInstanceSizeInWords(map));
3227   Node* object = Allocate(size, flags);
3228   StoreMapNoWriteBarrier(object, map);
3229   InitializeStructBody(object, map, size, Struct::kHeaderSize);
3230   return object;
3231 }
3232 
3233 void CodeStubAssembler::InitializeStructBody(Node* object, Node* map,
3234                                              Node* size, int start_offset) {
3235   CSA_SLOW_ASSERT(this, IsMap(map));
3236   Comment("InitializeStructBody");
3237   Node* filler = UndefinedConstant();
3238   // Calculate the untagged field addresses.
3239   object = BitcastTaggedToWord(object);
3240   Node* start_address =
3241       IntPtrAdd(object, IntPtrConstant(start_offset - kHeapObjectTag));
3242   Node* end_address =
3243       IntPtrSub(IntPtrAdd(object, size), IntPtrConstant(kHeapObjectTag));
3244   StoreFieldsNoWriteBarrier(start_address, end_address, filler);
3245 }
3246 
3247 Node* CodeStubAssembler::AllocateJSObjectFromMap(
3248     Node* map, Node* properties, Node* elements, AllocationFlags flags,
3249     SlackTrackingMode slack_tracking_mode) {
3250   CSA_ASSERT(this, IsMap(map));
3251   CSA_ASSERT(this, Word32BinaryNot(IsJSFunctionMap(map)));
3252   CSA_ASSERT(this, Word32BinaryNot(InstanceTypeEqual(LoadMapInstanceType(map),
3253                                                      JS_GLOBAL_OBJECT_TYPE)));
3254   Node* instance_size = TimesPointerSize(LoadMapInstanceSizeInWords(map));
3255   Node* object = AllocateInNewSpace(instance_size, flags);
3256   StoreMapNoWriteBarrier(object, map);
3257   InitializeJSObjectFromMap(object, map, instance_size, properties, elements,
3258                             slack_tracking_mode);
3259   return object;
3260 }
3261 
3262 void CodeStubAssembler::InitializeJSObjectFromMap(
3263     Node* object, Node* map, Node* instance_size, Node* properties,
3264     Node* elements, SlackTrackingMode slack_tracking_mode) {
3265   CSA_SLOW_ASSERT(this, IsMap(map));
3266   // This helper assumes that the object is in new-space, as guarded by the
3267   // check in AllocateJSObjectFromMap.
3268   if (properties == nullptr) {
3269     CSA_ASSERT(this, Word32BinaryNot(IsDictionaryMap((map))));
3270     StoreObjectFieldRoot(object, JSObject::kPropertiesOrHashOffset,
3271                          Heap::kEmptyFixedArrayRootIndex);
3272   } else {
3273     CSA_ASSERT(this, Word32Or(Word32Or(IsPropertyArray(properties),
3274                                        IsDictionary(properties)),
3275                               IsEmptyFixedArray(properties)));
3276     StoreObjectFieldNoWriteBarrier(object, JSObject::kPropertiesOrHashOffset,
3277                                    properties);
3278   }
3279   if (elements == nullptr) {
3280     StoreObjectFieldRoot(object, JSObject::kElementsOffset,
3281                          Heap::kEmptyFixedArrayRootIndex);
3282   } else {
3283     CSA_ASSERT(this, IsFixedArray(elements));
3284     StoreObjectFieldNoWriteBarrier(object, JSObject::kElementsOffset, elements);
3285   }
3286   if (slack_tracking_mode == kNoSlackTracking) {
3287     InitializeJSObjectBodyNoSlackTracking(object, map, instance_size);
3288   } else {
3289     DCHECK_EQ(slack_tracking_mode, kWithSlackTracking);
3290     InitializeJSObjectBodyWithSlackTracking(object, map, instance_size);
3291   }
3292 }
3293 
3294 void CodeStubAssembler::InitializeJSObjectBodyNoSlackTracking(
3295     Node* object, Node* map, Node* instance_size, int start_offset) {
3296   STATIC_ASSERT(Map::kNoSlackTracking == 0);
3297   CSA_ASSERT(
3298       this, IsClearWord32<Map::ConstructionCounterBits>(LoadMapBitField3(map)));
3299   InitializeFieldsWithRoot(object, IntPtrConstant(start_offset), instance_size,
3300                            Heap::kUndefinedValueRootIndex);
3301 }
3302 
3303 void CodeStubAssembler::InitializeJSObjectBodyWithSlackTracking(
3304     Node* object, Node* map, Node* instance_size) {
3305   CSA_SLOW_ASSERT(this, IsMap(map));
3306   Comment("InitializeJSObjectBodyNoSlackTracking");
3307 
3308   // Perform in-object slack tracking if requested.
3309   int start_offset = JSObject::kHeaderSize;
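  // While the map's construction counter is non-zero, the used part of the
  // object is initialized with undefined and the remaining in-object slack is
  // filled with one-pointer filler maps, so the instance size can be shrunk
  // once slack tracking completes.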
3310   Node* bit_field3 = LoadMapBitField3(map);
3311   Label end(this), slack_tracking(this), complete(this, Label::kDeferred);
3312   STATIC_ASSERT(Map::kNoSlackTracking == 0);
3313   GotoIf(IsSetWord32<Map::ConstructionCounterBits>(bit_field3),
3314          &slack_tracking);
3315   Comment("No slack tracking");
3316   InitializeJSObjectBodyNoSlackTracking(object, map, instance_size);
3317   Goto(&end);
3318 
3319   BIND(&slack_tracking);
3320   {
3321     Comment("Decrease construction counter");
3322     // Slack tracking is only done on initial maps.
3323     CSA_ASSERT(this, IsUndefined(LoadMapBackPointer(map)));
3324     STATIC_ASSERT(Map::ConstructionCounterBits::kNext == 32);
3325     Node* new_bit_field3 = Int32Sub(
3326         bit_field3, Int32Constant(1 << Map::ConstructionCounterBits::kShift));
3327     StoreObjectFieldNoWriteBarrier(map, Map::kBitField3Offset, new_bit_field3,
3328                                    MachineRepresentation::kWord32);
3329     STATIC_ASSERT(Map::kSlackTrackingCounterEnd == 1);
3330 
3331     // The object still has in-object slack, therefore the |used_or_unused|
3332     // field contains the "used" value.
3333     Node* used_size = TimesPointerSize(ChangeUint32ToWord(
3334         LoadObjectField(map, Map::kUsedOrUnusedInstanceSizeInWordsOffset,
3335                         MachineType::Uint8())));
3336 
3337     Comment("iInitialize filler fields");
3338     InitializeFieldsWithRoot(object, used_size, instance_size,
3339                              Heap::kOnePointerFillerMapRootIndex);
3340 
3341     Comment("Initialize undefined fields");
3342     InitializeFieldsWithRoot(object, IntPtrConstant(start_offset), used_size,
3343                              Heap::kUndefinedValueRootIndex);
3344 
3345     STATIC_ASSERT(Map::kNoSlackTracking == 0);
3346     GotoIf(IsClearWord32<Map::ConstructionCounterBits>(new_bit_field3),
3347            &complete);
3348     Goto(&end);
3349   }
3350 
3351   // Finalize the instance size.
3352   BIND(&complete);
3353   {
3354     // CompleteInobjectSlackTrackingForMap doesn't allocate and thus doesn't
3355     // need a context.
3356     CallRuntime(Runtime::kCompleteInobjectSlackTrackingForMap,
3357                 NoContextConstant(), map);
3358     Goto(&end);
3359   }
3360 
3361   BIND(&end);
3362 }
3363 
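// Writes |value| into every tagged-pointer-sized slot in the half-open range
// [start_address, end_address) without write barriers; both addresses are
// untagged and must be word-aligned.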
3364 void CodeStubAssembler::StoreFieldsNoWriteBarrier(Node* start_address,
3365                                                   Node* end_address,
3366                                                   Node* value) {
3367   Comment("StoreFieldsNoWriteBarrier");
3368   CSA_ASSERT(this, WordIsWordAligned(start_address));
3369   CSA_ASSERT(this, WordIsWordAligned(end_address));
3370   BuildFastLoop(start_address, end_address,
3371                 [this, value](Node* current) {
3372                   StoreNoWriteBarrier(MachineRepresentation::kTagged, current,
3373                                       value);
3374                 },
3375                 kPointerSize, INTPTR_PARAMETERS, IndexAdvanceMode::kPost);
3376 }
3377 
3378 Node* CodeStubAssembler::AllocateUninitializedJSArrayWithoutElements(
3379     Node* array_map, Node* length, Node* allocation_site) {
3380   Comment("begin allocation of JSArray without elements");
3381   CSA_SLOW_ASSERT(this, TaggedIsPositiveSmi(length));
3382   CSA_SLOW_ASSERT(this, IsMap(array_map));
3383   int base_size = JSArray::kSize;
3384   if (allocation_site != nullptr) {
3385     base_size += AllocationMemento::kSize;
3386   }
3387 
3388   Node* size = IntPtrConstant(base_size);
3389   Node* array =
3390       AllocateUninitializedJSArray(array_map, length, allocation_site, size);
3391   return array;
3392 }
3393 
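// Allocates the JSArray together with its elements FixedArray (and optional
// AllocationMemento) in a single new-space allocation; the elements object
// starts immediately after the JSArray (and memento, if any). Only the array
// and elements headers are initialized; the element slots are left untouched.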
3394 std::pair<Node*, Node*>
3395 CodeStubAssembler::AllocateUninitializedJSArrayWithElements(
3396     ElementsKind kind, Node* array_map, Node* length, Node* allocation_site,
3397     Node* capacity, ParameterMode capacity_mode) {
3398   Comment("begin allocation of JSArray with elements");
3399   CSA_SLOW_ASSERT(this, TaggedIsPositiveSmi(length));
3400   CSA_SLOW_ASSERT(this, IsMap(array_map));
3401   int base_size = JSArray::kSize;
3402 
3403   if (allocation_site != nullptr) {
3404     base_size += AllocationMemento::kSize;
3405   }
3406 
3407   int elements_offset = base_size;
3408 
3409   // Compute space for elements
3410   base_size += FixedArray::kHeaderSize;
3411   Node* size = ElementOffsetFromIndex(capacity, kind, capacity_mode, base_size);
3412 
3413   Node* array =
3414       AllocateUninitializedJSArray(array_map, length, allocation_site, size);
3415 
3416   Node* elements = InnerAllocate(array, elements_offset);
3417   StoreObjectFieldNoWriteBarrier(array, JSObject::kElementsOffset, elements);
3418   // Set up the elements object.
3419   STATIC_ASSERT(FixedArrayBase::kHeaderSize == 2 * kPointerSize);
3420   Heap::RootListIndex elements_map_index =
3421       IsDoubleElementsKind(kind) ? Heap::kFixedDoubleArrayMapRootIndex
3422                                  : Heap::kFixedArrayMapRootIndex;
3423   DCHECK(Heap::RootIsImmortalImmovable(elements_map_index));
3424   StoreMapNoWriteBarrier(elements, elements_map_index);
3425   TNode<Smi> capacity_smi = ParameterToTagged(capacity, capacity_mode);
3426   CSA_ASSERT(this, SmiGreaterThan(capacity_smi, SmiConstant(0)));
3427   StoreObjectFieldNoWriteBarrier(elements, FixedArray::kLengthOffset,
3428                                  capacity_smi);
3429   return {array, elements};
3430 }
3431 
3432 Node* CodeStubAssembler::AllocateUninitializedJSArray(Node* array_map,
3433                                                       Node* length,
3434                                                       Node* allocation_site,
3435                                                       Node* size_in_bytes) {
3436   CSA_SLOW_ASSERT(this, TaggedIsPositiveSmi(length));
3437   CSA_SLOW_ASSERT(this, IsMap(array_map));
3438 
3439   // Allocate space for the JSArray and the elements FixedArray in one go.
3440   Node* array = AllocateInNewSpace(size_in_bytes);
3441 
3442   Comment("write JSArray headers");
3443   StoreMapNoWriteBarrier(array, array_map);
3444 
3445   StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length);
3446 
3447   StoreObjectFieldRoot(array, JSArray::kPropertiesOrHashOffset,
3448                        Heap::kEmptyFixedArrayRootIndex);
3449 
3450   if (allocation_site != nullptr) {
3451     InitializeAllocationMemento(array, IntPtrConstant(JSArray::kSize),
3452                                 allocation_site);
3453   }
3454   return array;
3455 }
3456 
3457 Node* CodeStubAssembler::AllocateJSArray(ElementsKind kind, Node* array_map,
3458                                          Node* capacity, Node* length,
3459                                          Node* allocation_site,
3460                                          ParameterMode capacity_mode) {
3461   CSA_SLOW_ASSERT(this, IsMap(array_map));
3462   CSA_SLOW_ASSERT(this, TaggedIsPositiveSmi(length));
3463   CSA_SLOW_ASSERT(this, MatchesParameterMode(capacity, capacity_mode));
3464 
3465   int capacity_as_constant;
3466   Node *array = nullptr, *elements = nullptr;
3467   if (IsIntPtrOrSmiConstantZero(capacity, capacity_mode)) {
3468     // Array is empty. Use the shared empty fixed array instead of allocating a
3469     // new one.
3470     array = AllocateUninitializedJSArrayWithoutElements(array_map, length,
3471                                                         allocation_site);
3472     StoreObjectFieldRoot(array, JSArray::kElementsOffset,
3473                          Heap::kEmptyFixedArrayRootIndex);
3474   } else if (TryGetIntPtrOrSmiConstantValue(capacity, &capacity_as_constant,
3475                                             capacity_mode) &&
3476              capacity_as_constant > 0) {
3477     // Allocate both array and elements object, and initialize the JSArray.
3478     std::tie(array, elements) = AllocateUninitializedJSArrayWithElements(
3479         kind, array_map, length, allocation_site, capacity, capacity_mode);
3480     // Fill in the elements with holes.
3481     FillFixedArrayWithValue(kind, elements,
3482                             IntPtrOrSmiConstant(0, capacity_mode), capacity,
3483                             Heap::kTheHoleValueRootIndex, capacity_mode);
3484   } else {
3485     Label out(this), empty(this), nonempty(this);
3486     VARIABLE(var_array, MachineRepresentation::kTagged);
3487 
3488     Branch(SmiEqual(ParameterToTagged(capacity, capacity_mode), SmiConstant(0)),
3489            &empty, &nonempty);
3490 
3491     BIND(&empty);
3492     {
3493       // Array is empty. Use the shared empty fixed array instead of allocating
3494       // a new one.
3495       var_array.Bind(AllocateUninitializedJSArrayWithoutElements(
3496           array_map, length, allocation_site));
3497       StoreObjectFieldRoot(var_array.value(), JSArray::kElementsOffset,
3498                            Heap::kEmptyFixedArrayRootIndex);
3499       Goto(&out);
3500     }
3501 
3502     BIND(&nonempty);
3503     {
3504       // Allocate both array and elements object, and initialize the JSArray.
3505       Node* array;
3506       std::tie(array, elements) = AllocateUninitializedJSArrayWithElements(
3507           kind, array_map, length, allocation_site, capacity, capacity_mode);
3508       var_array.Bind(array);
3509       // Fill in the elements with holes.
3510       FillFixedArrayWithValue(kind, elements,
3511                               IntPtrOrSmiConstant(0, capacity_mode), capacity,
3512                               Heap::kTheHoleValueRootIndex, capacity_mode);
3513       Goto(&out);
3514     }
3515 
3516     BIND(&out);
3517     array = var_array.value();
3518   }
3519 
3520   return array;
3521 }
3522 
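// Creates a new JSArray containing a copy of |count| elements of |array|
// starting at |begin|, using the canonical Array map for the source array's
// ElementsKind from the native context.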
3523 Node* CodeStubAssembler::ExtractFastJSArray(Node* context, Node* array,
3524                                             Node* begin, Node* count,
3525                                             ParameterMode mode, Node* capacity,
3526                                             Node* allocation_site) {
3527   Node* original_array_map = LoadMap(array);
3528   Node* elements_kind = LoadMapElementsKind(original_array_map);
3529 
3530   // Use the canonical map for the Array's ElementsKind.
3531   Node* native_context = LoadNativeContext(context);
3532   Node* array_map = LoadJSArrayElementsMap(elements_kind, native_context);
3533 
3534   Node* new_elements =
3535       ExtractFixedArray(LoadElements(array), begin, count, capacity,
3536                         ExtractFixedArrayFlag::kAllFixedArrays, mode);
3537 
3538   Node* result = AllocateUninitializedJSArrayWithoutElements(
3539       array_map, ParameterToTagged(count, mode), allocation_site);
3540   StoreObjectField(result, JSObject::kElementsOffset, new_elements);
3541   return result;
3542 }
3543 
3544 Node* CodeStubAssembler::CloneFastJSArray(Node* context, Node* array,
3545                                           ParameterMode mode,
3546                                           Node* allocation_site) {
3547   Node* length = LoadJSArrayLength(array);
3548   Node* elements = LoadElements(array);
3549 
3550   Node* original_array_map = LoadMap(array);
3551   Node* elements_kind = LoadMapElementsKind(original_array_map);
3552 
3553   Node* new_elements = CloneFixedArray(elements);
3554 
3555   // Use the canonical map for the Array's ElementsKind
3556   Node* native_context = LoadNativeContext(context);
3557   Node* array_map = LoadJSArrayElementsMap(elements_kind, native_context);
3558   Node* result = AllocateUninitializedJSArrayWithoutElements(array_map, length,
3559                                                              allocation_site);
3560   StoreObjectField(result, JSObject::kElementsOffset, new_elements);
3561   return result;
3562 }
3563 
3564 TNode<FixedArray> CodeStubAssembler::AllocateFixedArray(
3565     ElementsKind kind, Node* capacity, ParameterMode mode,
3566     AllocationFlags flags, SloppyTNode<Map> fixed_array_map) {
3567   Comment("AllocateFixedArray");
3568   CSA_SLOW_ASSERT(this, MatchesParameterMode(capacity, mode));
3569   CSA_ASSERT(this, IntPtrOrSmiGreaterThan(capacity,
3570                                           IntPtrOrSmiConstant(0, mode), mode));
3571   TNode<IntPtrT> total_size = GetFixedArrayAllocationSize(capacity, kind, mode);
3572 
3573   if (IsDoubleElementsKind(kind)) flags |= kDoubleAlignment;
3574   // Allocate the FixedArray and install its map.
3575   Node* array = Allocate(total_size, flags);
3576   if (fixed_array_map != nullptr) {
3577     // Conservatively only skip the write barrier if there are no allocation
3578     // flags; this ensures that the object hasn't ended up in LOS. Note that the
3579     // fixed array map is currently always immortal and technically wouldn't
3580     // need the write barrier even in LOS, but it's better to not take chances
3581     // in case this invariant changes later, since it's difficult to enforce
3582     // locally here.
3583     if (flags == CodeStubAssembler::kNone) {
3584       StoreMapNoWriteBarrier(array, fixed_array_map);
3585     } else {
3586       StoreMap(array, fixed_array_map);
3587     }
3588   } else {
3589     Heap::RootListIndex map_index = IsDoubleElementsKind(kind)
3590                                         ? Heap::kFixedDoubleArrayMapRootIndex
3591                                         : Heap::kFixedArrayMapRootIndex;
3592     DCHECK(Heap::RootIsImmortalImmovable(map_index));
3593     StoreMapNoWriteBarrier(array, map_index);
3594   }
3595   StoreObjectFieldNoWriteBarrier(array, FixedArray::kLengthOffset,
3596                                  ParameterToTagged(capacity, mode));
3597   return UncheckedCast<FixedArray>(array);
3598 }
3599 
3600 TNode<FixedArray> CodeStubAssembler::ExtractFixedArray(
3601     Node* fixed_array, Node* first, Node* count, Node* capacity,
3602     ExtractFixedArrayFlags extract_flags, ParameterMode parameter_mode) {
3603   VARIABLE(var_result, MachineRepresentation::kTagged);
3604   VARIABLE(var_fixed_array_map, MachineRepresentation::kTagged);
3605   const AllocationFlags flags =
3606       (extract_flags & ExtractFixedArrayFlag::kNewSpaceAllocationOnly)
3607           ? CodeStubAssembler::kNone
3608           : CodeStubAssembler::kAllowLargeObjectAllocation;
3609   if (first == nullptr) {
3610     first = IntPtrOrSmiConstant(0, parameter_mode);
3611   }
3612   if (count == nullptr) {
3613     count =
3614         IntPtrOrSmiSub(TaggedToParameter(LoadFixedArrayBaseLength(fixed_array),
3615                                          parameter_mode),
3616                        first, parameter_mode);
3617 
3618     CSA_ASSERT(
3619         this, IntPtrOrSmiLessThanOrEqual(IntPtrOrSmiConstant(0, parameter_mode),
3620                                          count, parameter_mode));
3621   }
3622   if (capacity == nullptr) {
3623     capacity = count;
3624   } else {
3625     CSA_ASSERT(this, Word32BinaryNot(IntPtrOrSmiGreaterThan(
3626                          IntPtrOrSmiAdd(first, count, parameter_mode), capacity,
3627                          parameter_mode)));
3628   }
3629 
3630   Label if_fixed_double_array(this), empty(this), cow(this),
3631       done(this, {&var_result, &var_fixed_array_map});
3632   var_fixed_array_map.Bind(LoadMap(fixed_array));
3633   GotoIf(WordEqual(IntPtrOrSmiConstant(0, parameter_mode), capacity), &empty);
3634 
3635   if (extract_flags & ExtractFixedArrayFlag::kFixedDoubleArrays) {
3636     if (extract_flags & ExtractFixedArrayFlag::kFixedArrays) {
3637       GotoIf(IsFixedDoubleArrayMap(var_fixed_array_map.value()),
3638              &if_fixed_double_array);
3639     } else {
3640       CSA_ASSERT(this, IsFixedDoubleArrayMap(var_fixed_array_map.value()));
3641     }
3642   } else {
3643     DCHECK(extract_flags & ExtractFixedArrayFlag::kFixedArrays);
3644     CSA_ASSERT(this, Word32BinaryNot(
3645                          IsFixedDoubleArrayMap(var_fixed_array_map.value())));
3646   }
3647 
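  // The FixedArray path below first handles a copy-on-write source (which,
  // with ExtractFixedArrayFlag::kDontCopyCOW and a zero start index, can be
  // returned as-is), and otherwise copies into either a new-space or an
  // old-space allocation depending on the requested capacity.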
3648   if (extract_flags & ExtractFixedArrayFlag::kFixedArrays) {
3649     Label new_space_check(this, {&var_fixed_array_map});
3650     Branch(WordEqual(var_fixed_array_map.value(),
3651                      LoadRoot(Heap::kFixedCOWArrayMapRootIndex)),
3652            &cow, &new_space_check);
3653 
3654     BIND(&new_space_check);
3655 
3656     bool handle_old_space = true;
3657     if (extract_flags & ExtractFixedArrayFlag::kNewSpaceAllocationOnly) {
3658       handle_old_space = false;
3659       CSA_ASSERT(this, Word32BinaryNot(FixedArraySizeDoesntFitInNewSpace(
3660                            count, FixedArray::kHeaderSize, parameter_mode)));
3661     } else {
3662       int constant_count;
3663       handle_old_space =
3664           !TryGetIntPtrOrSmiConstantValue(count, &constant_count,
3665                                           parameter_mode) ||
3666           (constant_count >
3667            FixedArray::GetMaxLengthForNewSpaceAllocation(PACKED_ELEMENTS));
3668     }
3669 
3670     Label old_space(this, Label::kDeferred);
3671     if (handle_old_space) {
3672       GotoIfFixedArraySizeDoesntFitInNewSpace(
3673           capacity, &old_space, FixedArray::kHeaderSize, parameter_mode);
3674     }
3675 
3676     Comment("Copy PACKED_ELEMENTS new space");
3677 
3678     ElementsKind kind = PACKED_ELEMENTS;
3679     Node* to_elements =
3680         AllocateFixedArray(kind, capacity, parameter_mode,
3681                            AllocationFlag::kNone, var_fixed_array_map.value());
3682     var_result.Bind(to_elements);
3683     CopyFixedArrayElements(kind, fixed_array, kind, to_elements, first, count,
3684                            capacity, SKIP_WRITE_BARRIER, parameter_mode);
3685     Goto(&done);
3686 
3687     if (handle_old_space) {
3688       BIND(&old_space);
3689       {
3690         Comment("Copy PACKED_ELEMENTS old space");
3691 
3692         to_elements = AllocateFixedArray(kind, capacity, parameter_mode, flags,
3693                                          var_fixed_array_map.value());
3694         var_result.Bind(to_elements);
3695         CopyFixedArrayElements(kind, fixed_array, kind, to_elements, first,
3696                                count, capacity, UPDATE_WRITE_BARRIER,
3697                                parameter_mode);
3698         Goto(&done);
3699       }
3700     }
3701 
3702     BIND(&cow);
3703     {
3704       if (extract_flags & ExtractFixedArrayFlag::kDontCopyCOW) {
3705         GotoIf(WordNotEqual(IntPtrOrSmiConstant(0, parameter_mode), first),
3706                &new_space_check);
3707 
3708         var_result.Bind(fixed_array);
3709         Goto(&done);
3710       } else {
3711         var_fixed_array_map.Bind(LoadRoot(Heap::kFixedArrayMapRootIndex));
3712         Goto(&new_space_check);
3713       }
3714     }
3715   } else {
3716     Goto(&if_fixed_double_array);
3717   }
3718 
3719   if (extract_flags & ExtractFixedArrayFlag::kFixedDoubleArrays) {
3720     BIND(&if_fixed_double_array);
3721 
3722     Comment("Copy PACKED_DOUBLE_ELEMENTS");
3723 
3724     ElementsKind kind = PACKED_DOUBLE_ELEMENTS;
3725     Node* to_elements = AllocateFixedArray(kind, capacity, parameter_mode,
3726                                            flags, var_fixed_array_map.value());
3727     var_result.Bind(to_elements);
3728     CopyFixedArrayElements(kind, fixed_array, kind, to_elements, first, count,
3729                            capacity, SKIP_WRITE_BARRIER, parameter_mode);
3730 
3731     Goto(&done);
3732   }
3733 
3734   BIND(&empty);
3735   {
3736     Comment("Copy empty array");
3737 
3738     var_result.Bind(EmptyFixedArrayConstant());
3739     Goto(&done);
3740   }
3741 
3742   BIND(&done);
3743   return UncheckedCast<FixedArray>(var_result.value());
3744 }
3745 
3746 void CodeStubAssembler::InitializePropertyArrayLength(Node* property_array,
3747                                                       Node* length,
3748                                                       ParameterMode mode) {
3749   CSA_SLOW_ASSERT(this, IsPropertyArray(property_array));
3750   CSA_ASSERT(
3751       this, IntPtrOrSmiGreaterThan(length, IntPtrOrSmiConstant(0, mode), mode));
3752   CSA_ASSERT(
3753       this,
3754       IntPtrOrSmiLessThanOrEqual(
3755           length, IntPtrOrSmiConstant(PropertyArray::LengthField::kMax, mode),
3756           mode));
3757   StoreObjectFieldNoWriteBarrier(
3758       property_array, PropertyArray::kLengthAndHashOffset,
3759       ParameterToTagged(length, mode), MachineRepresentation::kTaggedSigned);
3760 }
3761 
3762 Node* CodeStubAssembler::AllocatePropertyArray(Node* capacity_node,
3763                                                ParameterMode mode,
3764                                                AllocationFlags flags) {
3765   CSA_SLOW_ASSERT(this, MatchesParameterMode(capacity_node, mode));
3766   CSA_ASSERT(this, IntPtrOrSmiGreaterThan(capacity_node,
3767                                           IntPtrOrSmiConstant(0, mode), mode));
3768   Node* total_size = GetPropertyArrayAllocationSize(capacity_node, mode);
3769 
3770   Node* array = Allocate(total_size, flags);
3771   Heap::RootListIndex map_index = Heap::kPropertyArrayMapRootIndex;
3772   DCHECK(Heap::RootIsImmortalImmovable(map_index));
3773   StoreMapNoWriteBarrier(array, map_index);
3774   InitializePropertyArrayLength(array, capacity_node, mode);
3775   return array;
3776 }
3777 
3778 void CodeStubAssembler::FillPropertyArrayWithUndefined(Node* array,
3779                                                        Node* from_node,
3780                                                        Node* to_node,
3781                                                        ParameterMode mode) {
3782   CSA_SLOW_ASSERT(this, MatchesParameterMode(from_node, mode));
3783   CSA_SLOW_ASSERT(this, MatchesParameterMode(to_node, mode));
3784   CSA_SLOW_ASSERT(this, IsPropertyArray(array));
3785   ElementsKind kind = PACKED_ELEMENTS;
3786   Node* value = UndefinedConstant();
3787   BuildFastFixedArrayForEach(array, kind, from_node, to_node,
3788                              [this, value](Node* array, Node* offset) {
3789                                StoreNoWriteBarrier(
3790                                    MachineRepresentation::kTagged, array,
3791                                    offset, value);
3792                              },
3793                              mode);
3794 }
3795 
3796 void CodeStubAssembler::FillFixedArrayWithValue(
3797     ElementsKind kind, Node* array, Node* from_node, Node* to_node,
3798     Heap::RootListIndex value_root_index, ParameterMode mode) {
3799   CSA_SLOW_ASSERT(this, MatchesParameterMode(from_node, mode));
3800   CSA_SLOW_ASSERT(this, MatchesParameterMode(to_node, mode));
3801   CSA_SLOW_ASSERT(this, IsFixedArrayWithKind(array, kind));
3802   DCHECK(value_root_index == Heap::kTheHoleValueRootIndex ||
3803          value_root_index == Heap::kUndefinedValueRootIndex);
3804 
3805   // Determine the value to initialize the {array} based
3806   // on the {value_root_index} and the elements {kind}.
3807   Node* value = LoadRoot(value_root_index);
3808   if (IsDoubleElementsKind(kind)) {
3809     value = LoadHeapNumberValue(value);
3810   }
3811 
3812   BuildFastFixedArrayForEach(
3813       array, kind, from_node, to_node,
3814       [this, value, kind](Node* array, Node* offset) {
3815         if (IsDoubleElementsKind(kind)) {
3816           StoreNoWriteBarrier(MachineRepresentation::kFloat64, array, offset,
3817                               value);
3818         } else {
3819           StoreNoWriteBarrier(MachineRepresentation::kTagged, array, offset,
3820                               value);
3821         }
3822       },
3823       mode);
3824 }
3825 
3826 void CodeStubAssembler::CopyFixedArrayElements(
3827     ElementsKind from_kind, Node* from_array, ElementsKind to_kind,
3828     Node* to_array, Node* first_element, Node* element_count, Node* capacity,
3829     WriteBarrierMode barrier_mode, ParameterMode mode) {
3830   CSA_SLOW_ASSERT(this, MatchesParameterMode(element_count, mode));
3831   CSA_SLOW_ASSERT(this, MatchesParameterMode(capacity, mode));
3832   CSA_SLOW_ASSERT(this, IsFixedArrayWithKindOrEmpty(from_array, from_kind));
3833   CSA_SLOW_ASSERT(this, IsFixedArrayWithKindOrEmpty(to_array, to_kind));
3834   STATIC_ASSERT(FixedArray::kHeaderSize == FixedDoubleArray::kHeaderSize);
3835   const int first_element_offset = FixedArray::kHeaderSize - kHeapObjectTag;
3836   Comment("[ CopyFixedArrayElements");
3837 
3838   // Typed array elements are not supported.
3839   DCHECK(!IsFixedTypedArrayElementsKind(from_kind));
3840   DCHECK(!IsFixedTypedArrayElementsKind(to_kind));
3841 
3842   Label done(this);
3843   bool from_double_elements = IsDoubleElementsKind(from_kind);
3844   bool to_double_elements = IsDoubleElementsKind(to_kind);
3845   bool doubles_to_objects_conversion =
3846       IsDoubleElementsKind(from_kind) && IsObjectElementsKind(to_kind);
3847   bool needs_write_barrier =
3848       doubles_to_objects_conversion ||
3849       (barrier_mode == UPDATE_WRITE_BARRIER && IsObjectElementsKind(to_kind));
3850   bool element_offset_matches =
3851       !needs_write_barrier && (Is64() || IsDoubleElementsKind(from_kind) ==
3852                                              IsDoubleElementsKind(to_kind));
3853   Node* double_hole =
3854       Is64() ? ReinterpretCast<UintPtrT>(Int64Constant(kHoleNanInt64))
3855              : ReinterpretCast<UintPtrT>(Int32Constant(kHoleNanLower32));
3856 
3857   if (doubles_to_objects_conversion) {
3858     // If the copy might trigger a GC, make sure that the FixedArray is
3859     // pre-initialized with holes so that it's always in a
3860     // consistent state.
3861     FillFixedArrayWithValue(to_kind, to_array, IntPtrOrSmiConstant(0, mode),
3862                             capacity, Heap::kTheHoleValueRootIndex, mode);
3863   } else if (element_count != capacity) {
3864     FillFixedArrayWithValue(to_kind, to_array, element_count, capacity,
3865                             Heap::kTheHoleValueRootIndex, mode);
3866   }
3867 
3868   Node* first_from_element_offset =
3869       ElementOffsetFromIndex(first_element, from_kind, mode, 0);
3870   Node* limit_offset = IntPtrAdd(first_from_element_offset,
3871                                  IntPtrConstant(first_element_offset));
3872   VARIABLE(
3873       var_from_offset, MachineType::PointerRepresentation(),
3874       ElementOffsetFromIndex(IntPtrOrSmiAdd(first_element, element_count, mode),
3875                              from_kind, mode, first_element_offset));
3876   // This second variable is used only when the element sizes of source and
3877   // destination arrays do not match.
3878   VARIABLE(var_to_offset, MachineType::PointerRepresentation());
3879   if (element_offset_matches) {
3880     var_to_offset.Bind(var_from_offset.value());
3881   } else {
3882     var_to_offset.Bind(ElementOffsetFromIndex(element_count, to_kind, mode,
3883                                               first_element_offset));
3884   }
3885 
3886   Variable* vars[] = {&var_from_offset, &var_to_offset};
3887   Label decrement(this, 2, vars);
3888 
3889   Node* to_array_adjusted =
3890       element_offset_matches
3891           ? IntPtrSub(BitcastTaggedToWord(to_array), first_from_element_offset)
3892           : to_array;
3893 
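  // The copy runs backwards: {var_from_offset} starts just past the last
  // source element and is decremented until it reaches {limit_offset}, the
  // offset of {first_element}.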
3894   Branch(WordEqual(var_from_offset.value(), limit_offset), &done, &decrement);
3895 
3896   BIND(&decrement);
3897   {
3898     Node* from_offset = IntPtrSub(
3899         var_from_offset.value(),
3900         IntPtrConstant(from_double_elements ? kDoubleSize : kPointerSize));
3901     var_from_offset.Bind(from_offset);
3902 
3903     Node* to_offset;
3904     if (element_offset_matches) {
3905       to_offset = from_offset;
3906     } else {
3907       to_offset = IntPtrSub(
3908           var_to_offset.value(),
3909           IntPtrConstant(to_double_elements ? kDoubleSize : kPointerSize));
3910       var_to_offset.Bind(to_offset);
3911     }
3912 
3913     Label next_iter(this), store_double_hole(this);
3914     Label* if_hole;
3915     if (doubles_to_objects_conversion) {
3916       // The target elements array is already preinitialized with holes, so we
3917       // can just proceed with the next iteration.
3918       if_hole = &next_iter;
3919     } else if (IsDoubleElementsKind(to_kind)) {
3920       if_hole = &store_double_hole;
3921     } else {
3922       // In all the other cases don't check for holes and copy the data as is.
3923       if_hole = nullptr;
3924     }
3925 
3926     Node* value = LoadElementAndPrepareForStore(
3927         from_array, var_from_offset.value(), from_kind, to_kind, if_hole);
3928 
3929     if (needs_write_barrier) {
3930       CHECK_EQ(to_array, to_array_adjusted);
3931       Store(to_array_adjusted, to_offset, value);
3932     } else if (to_double_elements) {
3933       StoreNoWriteBarrier(MachineRepresentation::kFloat64, to_array_adjusted,
3934                           to_offset, value);
3935     } else {
3936       StoreNoWriteBarrier(MachineRepresentation::kTagged, to_array_adjusted,
3937                           to_offset, value);
3938     }
3939     Goto(&next_iter);
3940 
3941     if (if_hole == &store_double_hole) {
3942       BIND(&store_double_hole);
3943       // Don't use doubles to store the hole double, since manipulating the
3944       // signaling NaN used for the hole in C++, e.g. with bit_cast, will
3945       // change its value on ia32 (the x87 stack is used to return values
3946       // and stores to the stack silently clear the signaling bit).
3947       //
3948       // TODO(danno): When we have a Float32/Float64 wrapper class that
3949       // preserves double bits during manipulation, remove this code/change
3950       // this to an indexed Float64 store.
3951       if (Is64()) {
3952         StoreNoWriteBarrier(MachineRepresentation::kWord64, to_array_adjusted,
3953                             to_offset, double_hole);
3954       } else {
3955         StoreNoWriteBarrier(MachineRepresentation::kWord32, to_array_adjusted,
3956                             to_offset, double_hole);
3957         StoreNoWriteBarrier(MachineRepresentation::kWord32, to_array_adjusted,
3958                             IntPtrAdd(to_offset, IntPtrConstant(kPointerSize)),
3959                             double_hole);
3960       }
3961       Goto(&next_iter);
3962     }
3963 
3964     BIND(&next_iter);
3965     Node* compare = WordNotEqual(from_offset, limit_offset);
3966     Branch(compare, &decrement, &done);
3967   }
3968 
3969   BIND(&done);
3970   Comment("] CopyFixedArrayElements");
3971 }
3972 
3973 TNode<FixedArray> CodeStubAssembler::ConvertFixedArrayBaseToFixedArray(
3974     TNode<FixedArrayBase> base, Label* cast_fail) {
3975   Label fixed_array(this);
3976   TNode<Map> map = LoadMap(base);
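  // Both the plain FixedArray map and the copy-on-write FixedArray map are
  // acceptable; any other map (e.g. FixedDoubleArray) jumps to {cast_fail}.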
3977   GotoIf(WordEqual(map, LoadRoot(Heap::kFixedArrayMapRootIndex)), &fixed_array);
3978   GotoIf(WordNotEqual(map, LoadRoot(Heap::kFixedCOWArrayMapRootIndex)),
3979          cast_fail);
3980   Goto(&fixed_array);
3981   BIND(&fixed_array);
3982   return UncheckedCast<FixedArray>(base);
3983 }
3984 
3985 void CodeStubAssembler::CopyPropertyArrayValues(Node* from_array,
3986                                                 Node* to_array,
3987                                                 Node* property_count,
3988                                                 WriteBarrierMode barrier_mode,
3989                                                 ParameterMode mode) {
3990   CSA_SLOW_ASSERT(this, MatchesParameterMode(property_count, mode));
3991   CSA_SLOW_ASSERT(this, Word32Or(IsPropertyArray(from_array),
3992                                  IsEmptyFixedArray(from_array)));
3993   CSA_SLOW_ASSERT(this, IsPropertyArray(to_array));
3994   Comment("[ CopyPropertyArrayValues");
3995 
3996   bool needs_write_barrier = barrier_mode == UPDATE_WRITE_BARRIER;
3997   Node* start = IntPtrOrSmiConstant(0, mode);
3998   ElementsKind kind = PACKED_ELEMENTS;
3999   BuildFastFixedArrayForEach(
4000       from_array, kind, start, property_count,
4001       [this, to_array, needs_write_barrier](Node* array, Node* offset) {
4002         Node* value = Load(MachineType::AnyTagged(), array, offset);
4003 
4004         if (needs_write_barrier) {
4005           Store(to_array, offset, value);
4006         } else {
4007           StoreNoWriteBarrier(MachineRepresentation::kTagged, to_array, offset,
4008                               value);
4009         }
4010       },
4011       mode);
4012   Comment("] CopyPropertyArrayValues");
4013 }
4014 
4015 void CodeStubAssembler::CopyStringCharacters(Node* from_string, Node* to_string,
4016                                              TNode<IntPtrT> from_index,
4017                                              TNode<IntPtrT> to_index,
4018                                              TNode<IntPtrT> character_count,
4019                                              String::Encoding from_encoding,
4020                                              String::Encoding to_encoding) {
4021   // Cannot assert IsString(from_string) and IsString(to_string) here because
4022   // CSA::SubString can pass in faked sequential strings when handling external
4023   // subject strings.
4024   bool from_one_byte = from_encoding == String::ONE_BYTE_ENCODING;
4025   bool to_one_byte = to_encoding == String::ONE_BYTE_ENCODING;
4026   DCHECK_IMPLIES(to_one_byte, from_one_byte);
4027   Comment("CopyStringCharacters %s -> %s",
4028           from_one_byte ? "ONE_BYTE_ENCODING" : "TWO_BYTE_ENCODING",
4029           to_one_byte ? "ONE_BYTE_ENCODING" : "TWO_BYTE_ENCODING");
4030 
4031   ElementsKind from_kind = from_one_byte ? UINT8_ELEMENTS : UINT16_ELEMENTS;
4032   ElementsKind to_kind = to_one_byte ? UINT8_ELEMENTS : UINT16_ELEMENTS;
4033   STATIC_ASSERT(SeqOneByteString::kHeaderSize == SeqTwoByteString::kHeaderSize);
4034   int header_size = SeqOneByteString::kHeaderSize - kHeapObjectTag;
4035   Node* from_offset = ElementOffsetFromIndex(from_index, from_kind,
4036                                              INTPTR_PARAMETERS, header_size);
4037   Node* to_offset =
4038       ElementOffsetFromIndex(to_index, to_kind, INTPTR_PARAMETERS, header_size);
4039   Node* byte_count =
4040       ElementOffsetFromIndex(character_count, from_kind, INTPTR_PARAMETERS);
4041   Node* limit_offset = IntPtrAdd(from_offset, byte_count);
4042 
4043   // Prepare the fast loop
4044   MachineType type =
4045       from_one_byte ? MachineType::Uint8() : MachineType::Uint16();
4046   MachineRepresentation rep = to_one_byte ? MachineRepresentation::kWord8
4047                                           : MachineRepresentation::kWord16;
4048   int from_increment = 1 << ElementsKindToShiftSize(from_kind);
4049   int to_increment = 1 << ElementsKindToShiftSize(to_kind);
4050 
4051   VARIABLE(current_to_offset, MachineType::PointerRepresentation(), to_offset);
4052   VariableList vars({&current_to_offset}, zone());
4053   int to_index_constant = 0, from_index_constant = 0;
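  // If both strings use the same encoding and start at the same index, the
  // source offset can double as the destination offset, so the loop only has
  // to advance a single offset variable.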
4054   bool index_same = (from_encoding == to_encoding) &&
4055                     (from_index == to_index ||
4056                      (ToInt32Constant(from_index, from_index_constant) &&
4057                       ToInt32Constant(to_index, to_index_constant) &&
4058                       from_index_constant == to_index_constant));
4059   BuildFastLoop(vars, from_offset, limit_offset,
4060                 [this, from_string, to_string, &current_to_offset, to_increment,
4061                  type, rep, index_same](Node* offset) {
4062                   Node* value = Load(type, from_string, offset);
4063                   StoreNoWriteBarrier(
4064                       rep, to_string,
4065                       index_same ? offset : current_to_offset.value(), value);
4066                   if (!index_same) {
4067                     Increment(&current_to_offset, to_increment);
4068                   }
4069                 },
4070                 from_increment, INTPTR_PARAMETERS, IndexAdvanceMode::kPost);
4071 }
4072 
4073 Node* CodeStubAssembler::LoadElementAndPrepareForStore(Node* array,
4074                                                        Node* offset,
4075                                                        ElementsKind from_kind,
4076                                                        ElementsKind to_kind,
4077                                                        Label* if_hole) {
4078   CSA_SLOW_ASSERT(this, IsFixedArrayWithKind(array, from_kind));
4079   if (IsDoubleElementsKind(from_kind)) {
4080     Node* value =
4081         LoadDoubleWithHoleCheck(array, offset, if_hole, MachineType::Float64());
4082     if (!IsDoubleElementsKind(to_kind)) {
4083       value = AllocateHeapNumberWithValue(value);
4084     }
4085     return value;
4086 
4087   } else {
4088     Node* value = Load(MachineType::AnyTagged(), array, offset);
4089     if (if_hole) {
4090       GotoIf(WordEqual(value, TheHoleConstant()), if_hole);
4091     }
4092     if (IsDoubleElementsKind(to_kind)) {
4093       if (IsSmiElementsKind(from_kind)) {
4094         value = SmiToFloat64(value);
4095       } else {
4096         value = LoadHeapNumberValue(value);
4097       }
4098     }
4099     return value;
4100   }
4101 }
4102 
4103 Node* CodeStubAssembler::CalculateNewElementsCapacity(Node* old_capacity,
4104                                                       ParameterMode mode) {
4105   CSA_SLOW_ASSERT(this, MatchesParameterMode(old_capacity, mode));
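  // Grow by 50% and add JSObject::kMinAddedElementsCapacity of headroom:
  //   new_capacity = old_capacity + old_capacity / 2 + kMinAddedElementsCapacity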
4106   Node* half_old_capacity = WordOrSmiShr(old_capacity, 1, mode);
4107   Node* new_capacity = IntPtrOrSmiAdd(half_old_capacity, old_capacity, mode);
4108   Node* padding =
4109       IntPtrOrSmiConstant(JSObject::kMinAddedElementsCapacity, mode);
4110   return IntPtrOrSmiAdd(new_capacity, padding, mode);
4111 }
4112 
4113 Node* CodeStubAssembler::TryGrowElementsCapacity(Node* object, Node* elements,
4114                                                  ElementsKind kind, Node* key,
4115                                                  Label* bailout) {
4116   CSA_SLOW_ASSERT(this, TaggedIsNotSmi(object));
4117   CSA_SLOW_ASSERT(this, IsFixedArrayWithKindOrEmpty(elements, kind));
4118   CSA_SLOW_ASSERT(this, TaggedIsSmi(key));
4119   Node* capacity = LoadFixedArrayBaseLength(elements);
4120 
4121   ParameterMode mode = OptimalParameterMode();
4122   capacity = TaggedToParameter(capacity, mode);
4123   key = TaggedToParameter(key, mode);
4124 
4125   return TryGrowElementsCapacity(object, elements, kind, key, capacity, mode,
4126                                  bailout);
4127 }
4128 
4129 Node* CodeStubAssembler::TryGrowElementsCapacity(Node* object, Node* elements,
4130                                                  ElementsKind kind, Node* key,
4131                                                  Node* capacity,
4132                                                  ParameterMode mode,
4133                                                  Label* bailout) {
4134   Comment("TryGrowElementsCapacity");
4135   CSA_SLOW_ASSERT(this, TaggedIsNotSmi(object));
4136   CSA_SLOW_ASSERT(this, IsFixedArrayWithKindOrEmpty(elements, kind));
4137   CSA_SLOW_ASSERT(this, MatchesParameterMode(capacity, mode));
4138   CSA_SLOW_ASSERT(this, MatchesParameterMode(key, mode));
4139 
4140   // If the gap growth is too big, fall back to the runtime.
4141   Node* max_gap = IntPtrOrSmiConstant(JSObject::kMaxGap, mode);
4142   Node* max_capacity = IntPtrOrSmiAdd(capacity, max_gap, mode);
4143   GotoIf(UintPtrOrSmiGreaterThanOrEqual(key, max_capacity, mode), bailout);
4144 
4145   // Calculate the capacity of the new backing store.
4146   Node* new_capacity = CalculateNewElementsCapacity(
4147       IntPtrOrSmiAdd(key, IntPtrOrSmiConstant(1, mode), mode), mode);
4148   return GrowElementsCapacity(object, elements, kind, kind, capacity,
4149                               new_capacity, mode, bailout);
4150 }
4151 
4152 Node* CodeStubAssembler::GrowElementsCapacity(
4153     Node* object, Node* elements, ElementsKind from_kind, ElementsKind to_kind,
4154     Node* capacity, Node* new_capacity, ParameterMode mode, Label* bailout) {
4155   Comment("[ GrowElementsCapacity");
4156   CSA_SLOW_ASSERT(this, TaggedIsNotSmi(object));
4157   CSA_SLOW_ASSERT(this, IsFixedArrayWithKindOrEmpty(elements, from_kind));
4158   CSA_SLOW_ASSERT(this, MatchesParameterMode(capacity, mode));
4159   CSA_SLOW_ASSERT(this, MatchesParameterMode(new_capacity, mode));
4160 
4161   // If size of the allocation for the new capacity doesn't fit in a page
4162   // that we can bump-pointer allocate from, fall back to the runtime.
4163   int max_size = FixedArrayBase::GetMaxLengthForNewSpaceAllocation(to_kind);
4164   GotoIf(UintPtrOrSmiGreaterThanOrEqual(
4165              new_capacity, IntPtrOrSmiConstant(max_size, mode), mode),
4166          bailout);
4167 
4168   // Allocate the new backing store.
4169   Node* new_elements = AllocateFixedArray(to_kind, new_capacity, mode);
4170 
4171   // Copy the elements from the old elements store to the new.
4172   // The size-check above guarantees that the |new_elements| is allocated
4173   // in new space so we can skip the write barrier.
4174   CopyFixedArrayElements(from_kind, elements, to_kind, new_elements, capacity,
4175                          new_capacity, SKIP_WRITE_BARRIER, mode);
4176 
4177   StoreObjectField(object, JSObject::kElementsOffset, new_elements);
4178   Comment("] GrowElementsCapacity");
4179   return new_elements;
4180 }
4181 
4182 void CodeStubAssembler::InitializeAllocationMemento(Node* base,
4183                                                     Node* base_allocation_size,
4184                                                     Node* allocation_site) {
4185   Comment("[Initialize AllocationMemento");
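  // The memento lives directly behind {base}: InnerAllocate returns the
  // address {base} + {base_allocation_size} within the same allocation.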
4186   Node* memento = InnerAllocate(base, base_allocation_size);
4187   StoreMapNoWriteBarrier(memento, Heap::kAllocationMementoMapRootIndex);
4188   StoreObjectFieldNoWriteBarrier(
4189       memento, AllocationMemento::kAllocationSiteOffset, allocation_site);
4190   if (FLAG_allocation_site_pretenuring) {
4191     TNode<Smi> count = CAST(LoadObjectField(
4192         allocation_site, AllocationSite::kPretenureCreateCountOffset));
4193     TNode<Smi> incremented_count = SmiAdd(count, SmiConstant(1));
4194     StoreObjectFieldNoWriteBarrier(allocation_site,
4195                                    AllocationSite::kPretenureCreateCountOffset,
4196                                    incremented_count);
4197   }
4198   Comment("]");
4199 }
4200 
4201 Node* CodeStubAssembler::TryTaggedToFloat64(Node* value,
4202                                             Label* if_valueisnotnumber) {
4203   Label out(this);
4204   VARIABLE(var_result, MachineRepresentation::kFloat64);
4205 
4206   // Check if the {value} is a Smi or a HeapObject.
4207   Label if_valueissmi(this), if_valueisnotsmi(this);
4208   Branch(TaggedIsSmi(value), &if_valueissmi, &if_valueisnotsmi);
4209 
4210   BIND(&if_valueissmi);
4211   {
4212     // Convert the Smi {value}.
4213     var_result.Bind(SmiToFloat64(value));
4214     Goto(&out);
4215   }
4216 
4217   BIND(&if_valueisnotsmi);
4218   {
4219     // Check if {value} is a HeapNumber.
4220     Label if_valueisheapnumber(this);
4221     Branch(IsHeapNumber(value), &if_valueisheapnumber, if_valueisnotnumber);
4222 
4223     BIND(&if_valueisheapnumber);
4224     {
4225       // Load the floating point value.
4226       var_result.Bind(LoadHeapNumberValue(value));
4227       Goto(&out);
4228     }
4229   }
4230   BIND(&out);
4231   return var_result.value();
4232 }
4233 
4234 Node* CodeStubAssembler::TruncateTaggedToFloat64(Node* context, Node* value) {
4235   // We might need to loop once due to ToNumber conversion.
4236   VARIABLE(var_value, MachineRepresentation::kTagged);
4237   VARIABLE(var_result, MachineRepresentation::kFloat64);
4238   Label loop(this, &var_value), done_loop(this, &var_result);
4239   var_value.Bind(value);
4240   Goto(&loop);
4241   BIND(&loop);
4242   {
4243     Label if_valueisnotnumber(this, Label::kDeferred);
4244 
4245     // Load the current {value}.
4246     value = var_value.value();
4247 
4248     // Convert {value} to Float64 if it is a number and convert it to a number
4249     // otherwise.
4250     Node* const result = TryTaggedToFloat64(value, &if_valueisnotnumber);
4251     var_result.Bind(result);
4252     Goto(&done_loop);
4253 
4254     BIND(&if_valueisnotnumber);
4255     {
4256       // Convert the {value} to a Number first.
4257       var_value.Bind(CallBuiltin(Builtins::kNonNumberToNumber, context, value));
4258       Goto(&loop);
4259     }
4260   }
4261   BIND(&done_loop);
4262   return var_result.value();
4263 }
4264 
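// Usage sketch (hypothetical caller, not from this file): a builtin that
// needs an untagged 32-bit value can write
//   Node* word32 = TruncateTaggedToWord32(context, tagged_value);
// where {tagged_value} may be any JS value; non-Numbers are converted via
// ToNumber before truncation.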
4265 Node* CodeStubAssembler::TruncateTaggedToWord32(Node* context, Node* value) {
4266   VARIABLE(var_result, MachineRepresentation::kWord32);
4267   Label done(this);
4268   TaggedToWord32OrBigIntImpl<Object::Conversion::kToNumber>(context, value,
4269                                                             &done, &var_result);
4270   BIND(&done);
4271   return var_result.value();
4272 }
4273 
4274 // Truncate {value} to word32 and jump to {if_number} if it is a Number,
4275 // or find that it is a BigInt and jump to {if_bigint}.
4276 void CodeStubAssembler::TaggedToWord32OrBigInt(Node* context, Node* value,
4277                                                Label* if_number,
4278                                                Variable* var_word32,
4279                                                Label* if_bigint,
4280                                                Variable* var_bigint) {
4281   TaggedToWord32OrBigIntImpl<Object::Conversion::kToNumeric>(
4282       context, value, if_number, var_word32, if_bigint, var_bigint);
4283 }
4284 
4285 // Truncate {value} to word32 and jump to {if_number} if it is a Number,
4286 // or find that it is a BigInt and jump to {if_bigint}. In either case,
4287 // store the type feedback in {var_feedback}.
4288 void CodeStubAssembler::TaggedToWord32OrBigIntWithFeedback(
4289     Node* context, Node* value, Label* if_number, Variable* var_word32,
4290     Label* if_bigint, Variable* var_bigint, Variable* var_feedback) {
4291   TaggedToWord32OrBigIntImpl<Object::Conversion::kToNumeric>(
4292       context, value, if_number, var_word32, if_bigint, var_bigint,
4293       var_feedback);
4294 }
4295 
4296 template <Object::Conversion conversion>
4297 void CodeStubAssembler::TaggedToWord32OrBigIntImpl(
4298     Node* context, Node* value, Label* if_number, Variable* var_word32,
4299     Label* if_bigint, Variable* var_bigint, Variable* var_feedback) {
4300   DCHECK(var_word32->rep() == MachineRepresentation::kWord32);
4301   DCHECK(var_bigint == nullptr ||
4302          var_bigint->rep() == MachineRepresentation::kTagged);
4303   DCHECK(var_feedback == nullptr ||
4304          var_feedback->rep() == MachineRepresentation::kTaggedSigned);
4305 
4306   // We might need to loop after conversion.
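  // The loop re-enters at most once: a non-Numeric input is converted (or an
  // oddball's cached ToNumber value is loaded) and the resulting Number or
  // BigInt is then handled on the next pass.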
4307   VARIABLE(var_value, MachineRepresentation::kTagged, value);
4308   OverwriteFeedback(var_feedback, BinaryOperationFeedback::kNone);
4309   Variable* loop_vars[] = {&var_value, var_feedback};
4310   int num_vars =
4311       var_feedback != nullptr ? arraysize(loop_vars) : arraysize(loop_vars) - 1;
4312   Label loop(this, num_vars, loop_vars);
4313   Goto(&loop);
4314   BIND(&loop);
4315   {
4316     value = var_value.value();
4317     Label not_smi(this), is_heap_number(this), is_oddball(this),
4318         is_bigint(this);
4319     GotoIf(TaggedIsNotSmi(value), &not_smi);
4320 
4321     // {value} is a Smi.
4322     var_word32->Bind(SmiToInt32(value));
4323     CombineFeedback(var_feedback, BinaryOperationFeedback::kSignedSmall);
4324     Goto(if_number);
4325 
4326     BIND(&not_smi);
4327     Node* map = LoadMap(value);
4328     GotoIf(IsHeapNumberMap(map), &is_heap_number);
4329     Node* instance_type = LoadMapInstanceType(map);
4330     if (conversion == Object::Conversion::kToNumeric) {
4331       GotoIf(IsBigIntInstanceType(instance_type), &is_bigint);
4332     }
4333 
4334     // Not HeapNumber (or BigInt if conversion == kToNumeric).
4335     {
4336       if (var_feedback != nullptr) {
4337         // We do not require an Or with earlier feedback here because once we
4338         // convert the value to a Numeric, we cannot reach this path. We can
4339         // only reach this path on the first pass when the feedback is kNone.
4340         CSA_ASSERT(this, SmiEqual(CAST(var_feedback->value()),
4341                                   SmiConstant(BinaryOperationFeedback::kNone)));
4342       }
4343       GotoIf(InstanceTypeEqual(instance_type, ODDBALL_TYPE), &is_oddball);
4344       // Not an oddball either -> convert.
4345       auto builtin = conversion == Object::Conversion::kToNumeric
4346                          ? Builtins::kNonNumberToNumeric
4347                          : Builtins::kNonNumberToNumber;
4348       var_value.Bind(CallBuiltin(builtin, context, value));
4349       OverwriteFeedback(var_feedback, BinaryOperationFeedback::kAny);
4350       Goto(&loop);
4351 
4352       BIND(&is_oddball);
4353       var_value.Bind(LoadObjectField(value, Oddball::kToNumberOffset));
4354       OverwriteFeedback(var_feedback,
4355                         BinaryOperationFeedback::kNumberOrOddball);
4356       Goto(&loop);
4357     }
4358 
4359     BIND(&is_heap_number);
4360     var_word32->Bind(TruncateHeapNumberValueToWord32(value));
4361     CombineFeedback(var_feedback, BinaryOperationFeedback::kNumber);
4362     Goto(if_number);
4363 
4364     if (conversion == Object::Conversion::kToNumeric) {
4365       BIND(&is_bigint);
4366       var_bigint->Bind(value);
4367       CombineFeedback(var_feedback, BinaryOperationFeedback::kBigInt);
4368       Goto(if_bigint);
4369     }
4370   }
4371 }
4372 
4373 Node* CodeStubAssembler::TruncateHeapNumberValueToWord32(Node* object) {
4374   Node* value = LoadHeapNumberValue(object);
4375   return TruncateFloat64ToWord32(value);
4376 }
4377 
4378 TNode<Number> CodeStubAssembler::ChangeFloat64ToTagged(
4379     SloppyTNode<Float64T> value) {
4380   TNode<Int32T> value32 = RoundFloat64ToInt32(value);
4381   TNode<Float64T> value64 = ChangeInt32ToFloat64(value32);
4382 
4383   Label if_valueisint32(this), if_valueisheapnumber(this), if_join(this);
4384 
4385   Label if_valueisequal(this), if_valueisnotequal(this);
4386   Branch(Float64Equal(value, value64), &if_valueisequal, &if_valueisnotequal);
4387   BIND(&if_valueisequal);
4388   {
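    // {value32} == 0 covers both +0.0 and -0.0; -0.0 has the sign bit set in
    // the high word and has no Smi representation, so it must be boxed in a
    // HeapNumber.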
4389     GotoIfNot(Word32Equal(value32, Int32Constant(0)), &if_valueisint32);
4390     Branch(Int32LessThan(UncheckedCast<Int32T>(Float64ExtractHighWord32(value)),
4391                          Int32Constant(0)),
4392            &if_valueisheapnumber, &if_valueisint32);
4393   }
4394   BIND(&if_valueisnotequal);
4395   Goto(&if_valueisheapnumber);
4396 
4397   TVARIABLE(Number, var_result);
4398   BIND(&if_valueisint32);
4399   {
4400     if (Is64()) {
4401       TNode<Smi> result = SmiTag(ChangeInt32ToIntPtr(value32));
4402       var_result = result;
4403       Goto(&if_join);
4404     } else {
4405       TNode<PairT<Int32T, BoolT>> pair = Int32AddWithOverflow(value32, value32);
4406       TNode<BoolT> overflow = Projection<1>(pair);
4407       Label if_overflow(this, Label::kDeferred), if_notoverflow(this);
4408       Branch(overflow, &if_overflow, &if_notoverflow);
4409       BIND(&if_overflow);
4410       Goto(&if_valueisheapnumber);
4411       BIND(&if_notoverflow);
4412       {
4413         TNode<IntPtrT> result = ChangeInt32ToIntPtr(Projection<0>(pair));
4414         var_result = BitcastWordToTaggedSigned(result);
4415         Goto(&if_join);
4416       }
4417     }
4418   }
4419   BIND(&if_valueisheapnumber);
4420   {
4421     var_result = AllocateHeapNumberWithValue(value);
4422     Goto(&if_join);
4423   }
4424   BIND(&if_join);
4425   return var_result.value();
4426 }
4427 
4428 TNode<Number> CodeStubAssembler::ChangeInt32ToTagged(
4429     SloppyTNode<Int32T> value) {
4430   if (Is64()) {
4431     return SmiTag(ChangeInt32ToIntPtr(value));
4432   }
4433   TVARIABLE(Number, var_result);
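  // Adding {value} to itself is the Smi tag shift (<< 1) on 32-bit targets;
  // the overflow bit tells us whether the result fits in the 31-bit Smi range.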
4434   TNode<PairT<Int32T, BoolT>> pair = Int32AddWithOverflow(value, value);
4435   TNode<BoolT> overflow = Projection<1>(pair);
4436   Label if_overflow(this, Label::kDeferred), if_notoverflow(this),
4437       if_join(this);
4438   Branch(overflow, &if_overflow, &if_notoverflow);
4439   BIND(&if_overflow);
4440   {
4441     TNode<Float64T> value64 = ChangeInt32ToFloat64(value);
4442     TNode<HeapNumber> result = AllocateHeapNumberWithValue(value64);
4443     var_result = result;
4444   }
4445   Goto(&if_join);
4446   BIND(&if_notoverflow);
4447   {
4448     TNode<Smi> result =
4449         BitcastWordToTaggedSigned(ChangeInt32ToIntPtr(Projection<0>(pair)));
4450     var_result = result;
4451   }
4452   Goto(&if_join);
4453   BIND(&if_join);
4454   return var_result.value();
4455 }
4456 
4457 TNode<Number> CodeStubAssembler::ChangeUint32ToTagged(
4458     SloppyTNode<Uint32T> value) {
4459   Label if_overflow(this, Label::kDeferred), if_not_overflow(this),
4460       if_join(this);
4461   TVARIABLE(Number, var_result);
4462   // If {value} > Smi::kMaxValue, we need to store it in a HeapNumber.
4463   Branch(Uint32LessThan(Int32Constant(Smi::kMaxValue), value), &if_overflow,
4464          &if_not_overflow);
4465 
4466   BIND(&if_not_overflow);
4467   {
4468     if (Is64()) {
4469       var_result =
4470           SmiTag(ReinterpretCast<IntPtrT>(ChangeUint32ToUint64(value)));
4471     } else {
4472       // If tagging {value} results in an overflow, we need to use a HeapNumber
4473       // to represent it.
4474       // TODO(tebbi): This overflow can never happen.
4475       TNode<PairT<Int32T, BoolT>> pair = Int32AddWithOverflow(
4476           UncheckedCast<Int32T>(value), UncheckedCast<Int32T>(value));
4477       TNode<BoolT> overflow = Projection<1>(pair);
4478       GotoIf(overflow, &if_overflow);
4479 
4480       TNode<Smi> result =
4481           BitcastWordToTaggedSigned(ChangeInt32ToIntPtr(Projection<0>(pair)));
4482       var_result = result;
4483     }
4484   }
4485   Goto(&if_join);
4486 
4487   BIND(&if_overflow);
4488   {
4489     TNode<Float64T> float64_value = ChangeUint32ToFloat64(value);
4490     var_result = AllocateHeapNumberWithValue(float64_value);
4491   }
4492   Goto(&if_join);
4493 
4494   BIND(&if_join);
4495   return var_result.value();
4496 }
4497 
4498 TNode<String> CodeStubAssembler::ToThisString(Node* context, Node* value,
4499                                               char const* method_name) {
4500   VARIABLE(var_value, MachineRepresentation::kTagged, value);
4501 
4502   // Check if the {value} is a Smi or a HeapObject.
4503   Label if_valueissmi(this, Label::kDeferred), if_valueisnotsmi(this),
4504       if_valueisstring(this);
4505   Branch(TaggedIsSmi(value), &if_valueissmi, &if_valueisnotsmi);
4506   BIND(&if_valueisnotsmi);
4507   {
4508     // Load the instance type of the {value}.
4509     Node* value_instance_type = LoadInstanceType(value);
4510 
4511     // Check if the {value} is already String.
4512     Label if_valueisnotstring(this, Label::kDeferred);
4513     Branch(IsStringInstanceType(value_instance_type), &if_valueisstring,
4514            &if_valueisnotstring);
4515     BIND(&if_valueisnotstring);
4516     {
4517       // Check if the {value} is null or undefined.
4518       Label if_valueisnullorundefined(this, Label::kDeferred);
4519       GotoIf(IsNullOrUndefined(value), &if_valueisnullorundefined);
4520       // Convert the {value} to a String.
4521       var_value.Bind(CallBuiltin(Builtins::kToString, context, value));
4522       Goto(&if_valueisstring);
4523 
4524       BIND(&if_valueisnullorundefined);
4525       {
4526         // The {value} is either null or undefined.
4527         ThrowTypeError(context, MessageTemplate::kCalledOnNullOrUndefined,
4528                        method_name);
4529       }
4530     }
4531   }
4532   BIND(&if_valueissmi);
4533   {
4534     // The {value} is a Smi, convert it to a String.
4535     var_value.Bind(CallBuiltin(Builtins::kNumberToString, context, value));
4536     Goto(&if_valueisstring);
4537   }
4538   BIND(&if_valueisstring);
4539   return CAST(var_value.value());
4540 }
4541 
4542 TNode<Float64T> CodeStubAssembler::ChangeNumberToFloat64(
4543     SloppyTNode<Number> value) {
4544   // TODO(tebbi): Remove assert once argument is TNode instead of SloppyTNode.
4545   CSA_SLOW_ASSERT(this, IsNumber(value));
4546   TVARIABLE(Float64T, result);
4547   Label smi(this);
4548   Label done(this, &result);
4549   GotoIf(TaggedIsSmi(value), &smi);
4550   result = LoadHeapNumberValue(CAST(value));
4551   Goto(&done);
4552 
4553   BIND(&smi);
4554   {
4555     result = SmiToFloat64(CAST(value));
4556     Goto(&done);
4557   }
4558 
4559   BIND(&done);
4560   return result.value();
4561 }
4562 
4563 TNode<UintPtrT> CodeStubAssembler::ChangeNonnegativeNumberToUintPtr(
4564     TNode<Number> value) {
4565   TVARIABLE(UintPtrT, result);
4566   Label smi(this), done(this, &result);
4567   GotoIf(TaggedIsSmi(value), &smi);
4568 
4569   TNode<HeapNumber> value_hn = CAST(value);
4570   result = ChangeFloat64ToUintPtr(LoadHeapNumberValue(value_hn));
4571   Goto(&done);
4572 
4573   BIND(&smi);
4574   TNode<Smi> value_smi = CAST(value);
4575   CSA_SLOW_ASSERT(this, SmiLessThan(SmiConstant(-1), value_smi));
4576   result = UncheckedCast<UintPtrT>(SmiToIntPtr(value_smi));
4577   Goto(&done);
4578 
4579   BIND(&done);
4580   return result.value();
4581 }
4582 
4583 Node* CodeStubAssembler::TimesPointerSize(Node* value) {
4584   return WordShl(value, IntPtrConstant(kPointerSizeLog2));
4585 }
4586 
4587 Node* CodeStubAssembler::ToThisValue(Node* context, Node* value,
4588                                      PrimitiveType primitive_type,
4589                                      char const* method_name) {
4590   // We might need to loop once due to JSValue unboxing.
4591   VARIABLE(var_value, MachineRepresentation::kTagged, value);
4592   Label loop(this, &var_value), done_loop(this),
4593       done_throw(this, Label::kDeferred);
4594   Goto(&loop);
4595   BIND(&loop);
4596   {
4597     // Load the current {value}.
4598     value = var_value.value();
4599 
4600     // Check if the {value} is a Smi or a HeapObject.
4601     GotoIf(TaggedIsSmi(value), (primitive_type == PrimitiveType::kNumber)
4602                                    ? &done_loop
4603                                    : &done_throw);
4604 
4605     // Load the map of the {value}.
4606     Node* value_map = LoadMap(value);
4607 
4608     // Load the instance type of the {value}.
4609     Node* value_instance_type = LoadMapInstanceType(value_map);
4610 
4611     // Check if {value} is a JSValue.
4612     Label if_valueisvalue(this, Label::kDeferred), if_valueisnotvalue(this);
4613     Branch(InstanceTypeEqual(value_instance_type, JS_VALUE_TYPE),
4614            &if_valueisvalue, &if_valueisnotvalue);
4615 
4616     BIND(&if_valueisvalue);
4617     {
4618       // Load the actual value from the {value}.
4619       var_value.Bind(LoadObjectField(value, JSValue::kValueOffset));
4620       Goto(&loop);
4621     }
4622 
4623     BIND(&if_valueisnotvalue);
4624     {
4625       switch (primitive_type) {
4626         case PrimitiveType::kBoolean:
4627           GotoIf(WordEqual(value_map, BooleanMapConstant()), &done_loop);
4628           break;
4629         case PrimitiveType::kNumber:
4630           GotoIf(WordEqual(value_map, HeapNumberMapConstant()), &done_loop);
4631           break;
4632         case PrimitiveType::kString:
4633           GotoIf(IsStringInstanceType(value_instance_type), &done_loop);
4634           break;
4635         case PrimitiveType::kSymbol:
4636           GotoIf(WordEqual(value_map, SymbolMapConstant()), &done_loop);
4637           break;
4638       }
4639       Goto(&done_throw);
4640     }
4641   }
4642 
4643   BIND(&done_throw);
4644   {
4645     const char* primitive_name = nullptr;
4646     switch (primitive_type) {
4647       case PrimitiveType::kBoolean:
4648         primitive_name = "Boolean";
4649         break;
4650       case PrimitiveType::kNumber:
4651         primitive_name = "Number";
4652         break;
4653       case PrimitiveType::kString:
4654         primitive_name = "String";
4655         break;
4656       case PrimitiveType::kSymbol:
4657         primitive_name = "Symbol";
4658         break;
4659     }
4660     CHECK_NOT_NULL(primitive_name);
4661 
4662     // The {value} is not a compatible receiver for this method.
4663     ThrowTypeError(context, MessageTemplate::kNotGeneric, method_name,
4664                    primitive_name);
4665   }
4666 
4667   BIND(&done_loop);
4668   return var_value.value();
4669 }
4670 
4671 Node* CodeStubAssembler::ThrowIfNotInstanceType(Node* context, Node* value,
4672                                                 InstanceType instance_type,
4673                                                 char const* method_name) {
4674   Label out(this), throw_exception(this, Label::kDeferred);
4675   VARIABLE(var_value_map, MachineRepresentation::kTagged);
4676 
4677   GotoIf(TaggedIsSmi(value), &throw_exception);
4678 
4679   // Load the instance type of the {value}.
4680   var_value_map.Bind(LoadMap(value));
4681   Node* const value_instance_type = LoadMapInstanceType(var_value_map.value());
4682 
4683   Branch(Word32Equal(value_instance_type, Int32Constant(instance_type)), &out,
4684          &throw_exception);
4685 
4686   // The {value} is not a compatible receiver for this method.
4687   BIND(&throw_exception);
4688   ThrowTypeError(context, MessageTemplate::kIncompatibleMethodReceiver,
4689                  StringConstant(method_name), value);
4690 
4691   BIND(&out);
4692   return var_value_map.value();
4693 }
4694 
4695 Node* CodeStubAssembler::ThrowIfNotJSReceiver(
4696     Node* context, Node* value, MessageTemplate::Template msg_template,
4697     const char* method_name) {
4698   Label out(this), throw_exception(this, Label::kDeferred);
4699   VARIABLE(var_value_map, MachineRepresentation::kTagged);
4700 
4701   GotoIf(TaggedIsSmi(value), &throw_exception);
4702 
4703   // Load the instance type of the {value}.
4704   var_value_map.Bind(LoadMap(value));
4705   Node* const value_instance_type = LoadMapInstanceType(var_value_map.value());
4706 
4707   Branch(IsJSReceiverInstanceType(value_instance_type), &out, &throw_exception);
4708 
4709   // The {value} is not a compatible receiver for this method.
4710   BIND(&throw_exception);
4711   ThrowTypeError(context, msg_template, method_name);
4712 
4713   BIND(&out);
4714   return var_value_map.value();
4715 }
4716 
4717 void CodeStubAssembler::ThrowRangeError(Node* context,
4718                                         MessageTemplate::Template message,
4719                                         Node* arg0, Node* arg1, Node* arg2) {
4720   Node* template_index = SmiConstant(message);
4721   if (arg0 == nullptr) {
4722     CallRuntime(Runtime::kThrowRangeError, context, template_index);
4723   } else if (arg1 == nullptr) {
4724     CallRuntime(Runtime::kThrowRangeError, context, template_index, arg0);
4725   } else if (arg2 == nullptr) {
4726     CallRuntime(Runtime::kThrowRangeError, context, template_index, arg0, arg1);
4727   } else {
4728     CallRuntime(Runtime::kThrowRangeError, context, template_index, arg0, arg1,
4729                 arg2);
4730   }
4731   Unreachable();
4732 }
4733 
4734 void CodeStubAssembler::ThrowTypeError(Node* context,
4735                                        MessageTemplate::Template message,
4736                                        char const* arg0, char const* arg1) {
4737   Node* arg0_node = nullptr;
4738   if (arg0) arg0_node = StringConstant(arg0);
4739   Node* arg1_node = nullptr;
4740   if (arg1) arg1_node = StringConstant(arg1);
4741   ThrowTypeError(context, message, arg0_node, arg1_node);
4742 }
4743 
4744 void CodeStubAssembler::ThrowTypeError(Node* context,
4745                                        MessageTemplate::Template message,
4746                                        Node* arg0, Node* arg1, Node* arg2) {
4747   Node* template_index = SmiConstant(message);
4748   if (arg0 == nullptr) {
4749     CallRuntime(Runtime::kThrowTypeError, context, template_index);
4750   } else if (arg1 == nullptr) {
4751     CallRuntime(Runtime::kThrowTypeError, context, template_index, arg0);
4752   } else if (arg2 == nullptr) {
4753     CallRuntime(Runtime::kThrowTypeError, context, template_index, arg0, arg1);
4754   } else {
4755     CallRuntime(Runtime::kThrowTypeError, context, template_index, arg0, arg1,
4756                 arg2);
4757   }
4758   Unreachable();
4759 }
4760 
4761 TNode<BoolT> CodeStubAssembler::InstanceTypeEqual(
4762     SloppyTNode<Int32T> instance_type, int type) {
4763   return Word32Equal(instance_type, Int32Constant(type));
4764 }
4765 
4766 TNode<BoolT> CodeStubAssembler::IsDictionaryMap(SloppyTNode<Map> map) {
4767   CSA_SLOW_ASSERT(this, IsMap(map));
4768   Node* bit_field3 = LoadMapBitField3(map);
4769   return IsSetWord32<Map::IsDictionaryMapBit>(bit_field3);
4770 }
4771 
4772 TNode<BoolT> CodeStubAssembler::IsExtensibleMap(SloppyTNode<Map> map) {
4773   CSA_ASSERT(this, IsMap(map));
4774   return IsSetWord32<Map::IsExtensibleBit>(LoadMapBitField2(map));
4775 }
4776 
4777 TNode<BoolT> CodeStubAssembler::IsCallableMap(SloppyTNode<Map> map) {
4778   CSA_ASSERT(this, IsMap(map));
4779   return IsSetWord32<Map::IsCallableBit>(LoadMapBitField(map));
4780 }
4781 
4782 TNode<BoolT> CodeStubAssembler::IsDeprecatedMap(SloppyTNode<Map> map) {
4783   CSA_ASSERT(this, IsMap(map));
4784   return IsSetWord32<Map::IsDeprecatedBit>(LoadMapBitField3(map));
4785 }
4786 
4787 TNode<BoolT> CodeStubAssembler::IsUndetectableMap(SloppyTNode<Map> map) {
4788   CSA_ASSERT(this, IsMap(map));
4789   return IsSetWord32<Map::IsUndetectableBit>(LoadMapBitField(map));
4790 }
4791 
4792 TNode<BoolT> CodeStubAssembler::IsNoElementsProtectorCellInvalid() {
4793   Node* invalid = SmiConstant(Isolate::kProtectorInvalid);
4794   Node* cell = LoadRoot(Heap::kNoElementsProtectorRootIndex);
4795   Node* cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
4796   return WordEqual(cell_value, invalid);
4797 }
4798 
4799 TNode<BoolT> CodeStubAssembler::IsPromiseResolveProtectorCellInvalid() {
4800   Node* invalid = SmiConstant(Isolate::kProtectorInvalid);
4801   Node* cell = LoadRoot(Heap::kPromiseResolveProtectorRootIndex);
4802   Node* cell_value = LoadObjectField(cell, Cell::kValueOffset);
4803   return WordEqual(cell_value, invalid);
4804 }
4805 
4806 TNode<BoolT> CodeStubAssembler::IsPromiseThenProtectorCellInvalid() {
4807   Node* invalid = SmiConstant(Isolate::kProtectorInvalid);
4808   Node* cell = LoadRoot(Heap::kPromiseThenProtectorRootIndex);
4809   Node* cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
4810   return WordEqual(cell_value, invalid);
4811 }
4812 
4813 TNode<BoolT> CodeStubAssembler::IsArraySpeciesProtectorCellInvalid() {
4814   Node* invalid = SmiConstant(Isolate::kProtectorInvalid);
4815   Node* cell = LoadRoot(Heap::kArraySpeciesProtectorRootIndex);
4816   Node* cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
4817   return WordEqual(cell_value, invalid);
4818 }
4819 
4820 TNode<BoolT> CodeStubAssembler::IsTypedArraySpeciesProtectorCellInvalid() {
4821   Node* invalid = SmiConstant(Isolate::kProtectorInvalid);
4822   Node* cell = LoadRoot(Heap::kTypedArraySpeciesProtectorRootIndex);
4823   Node* cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
4824   return WordEqual(cell_value, invalid);
4825 }
4826 
4827 TNode<BoolT> CodeStubAssembler::IsPromiseSpeciesProtectorCellInvalid() {
4828   Node* invalid = SmiConstant(Isolate::kProtectorInvalid);
4829   Node* cell = LoadRoot(Heap::kPromiseSpeciesProtectorRootIndex);
4830   Node* cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
4831   return WordEqual(cell_value, invalid);
4832 }
4833 
4834 TNode<BoolT> CodeStubAssembler::IsPrototypeInitialArrayPrototype(
4835     SloppyTNode<Context> context, SloppyTNode<Map> map) {
4836   Node* const native_context = LoadNativeContext(context);
4837   Node* const initial_array_prototype = LoadContextElement(
4838       native_context, Context::INITIAL_ARRAY_PROTOTYPE_INDEX);
4839   Node* proto = LoadMapPrototype(map);
4840   return WordEqual(proto, initial_array_prototype);
4841 }
4842 
4843 TNode<BoolT> CodeStubAssembler::IsPrototypeTypedArrayPrototype(
4844     SloppyTNode<Context> context, SloppyTNode<Map> map) {
4845   TNode<Context> const native_context = LoadNativeContext(context);
4846   TNode<Object> const typed_array_prototype =
4847       LoadContextElement(native_context, Context::TYPED_ARRAY_PROTOTYPE_INDEX);
4848   TNode<HeapObject> proto = LoadMapPrototype(map);
4849   TNode<HeapObject> proto_of_proto = Select<HeapObject>(
4850       IsJSObject(proto), [=] { return LoadMapPrototype(LoadMap(proto)); },
4851       [=] { return NullConstant(); });
4852   return WordEqual(proto_of_proto, typed_array_prototype);
4853 }
4854 
4855 TNode<BoolT> CodeStubAssembler::TaggedIsCallable(TNode<Object> object) {
4856   return Select<BoolT>(
4857       TaggedIsSmi(object), [=] { return Int32FalseConstant(); },
4858       [=] {
4859         return IsCallableMap(LoadMap(UncheckedCast<HeapObject>(object)));
4860       });
4861 }
4862 
4863 TNode<BoolT> CodeStubAssembler::IsCallable(SloppyTNode<HeapObject> object) {
4864   return IsCallableMap(LoadMap(object));
4865 }
4866 
4867 TNode<BoolT> CodeStubAssembler::IsCell(SloppyTNode<HeapObject> object) {
4868   return WordEqual(LoadMap(object), LoadRoot(Heap::kCellMapRootIndex));
4869 }
4870 
4871 TNode<BoolT> CodeStubAssembler::IsCode(SloppyTNode<HeapObject> object) {
4872   return HasInstanceType(object, CODE_TYPE);
4873 }
4874 
4875 TNode<BoolT> CodeStubAssembler::IsConstructorMap(SloppyTNode<Map> map) {
4876   CSA_ASSERT(this, IsMap(map));
4877   return IsSetWord32<Map::IsConstructorBit>(LoadMapBitField(map));
4878 }
4879 
4880 TNode<BoolT> CodeStubAssembler::IsConstructor(SloppyTNode<HeapObject> object) {
4881   return IsConstructorMap(LoadMap(object));
4882 }
4883 
4884 TNode<BoolT> CodeStubAssembler::IsFunctionWithPrototypeSlotMap(
4885     SloppyTNode<Map> map) {
4886   CSA_ASSERT(this, IsMap(map));
4887   return IsSetWord32<Map::HasPrototypeSlotBit>(LoadMapBitField(map));
4888 }
4889 
4890 TNode<BoolT> CodeStubAssembler::IsSpecialReceiverInstanceType(
4891     TNode<Int32T> instance_type) {
4892   STATIC_ASSERT(JS_GLOBAL_OBJECT_TYPE <= LAST_SPECIAL_RECEIVER_TYPE);
4893   return Int32LessThanOrEqual(instance_type,
4894                               Int32Constant(LAST_SPECIAL_RECEIVER_TYPE));
4895 }
4896 
4897 TNode<BoolT> CodeStubAssembler::IsCustomElementsReceiverInstanceType(
4898     TNode<Int32T> instance_type) {
4899   return Int32LessThanOrEqual(instance_type,
4900                               Int32Constant(LAST_CUSTOM_ELEMENTS_RECEIVER));
4901 }
4902 
4903 TNode<BoolT> CodeStubAssembler::IsStringInstanceType(
4904     SloppyTNode<Int32T> instance_type) {
4905   STATIC_ASSERT(INTERNALIZED_STRING_TYPE == FIRST_TYPE);
4906   return Int32LessThan(instance_type, Int32Constant(FIRST_NONSTRING_TYPE));
4907 }
4908 
4909 TNode<BoolT> CodeStubAssembler::IsOneByteStringInstanceType(
4910     SloppyTNode<Int32T> instance_type) {
4911   CSA_ASSERT(this, IsStringInstanceType(instance_type));
4912   return Word32Equal(
4913       Word32And(instance_type, Int32Constant(kStringEncodingMask)),
4914       Int32Constant(kOneByteStringTag));
4915 }
4916 
4917 TNode<BoolT> CodeStubAssembler::IsSequentialStringInstanceType(
4918     SloppyTNode<Int32T> instance_type) {
4919   CSA_ASSERT(this, IsStringInstanceType(instance_type));
4920   return Word32Equal(
4921       Word32And(instance_type, Int32Constant(kStringRepresentationMask)),
4922       Int32Constant(kSeqStringTag));
4923 }
4924 
4925 TNode<BoolT> CodeStubAssembler::IsConsStringInstanceType(
4926     SloppyTNode<Int32T> instance_type) {
4927   CSA_ASSERT(this, IsStringInstanceType(instance_type));
4928   return Word32Equal(
4929       Word32And(instance_type, Int32Constant(kStringRepresentationMask)),
4930       Int32Constant(kConsStringTag));
4931 }
4932 
4933 TNode<BoolT> CodeStubAssembler::IsIndirectStringInstanceType(
4934     SloppyTNode<Int32T> instance_type) {
4935   CSA_ASSERT(this, IsStringInstanceType(instance_type));
4936   STATIC_ASSERT(kIsIndirectStringMask == 0x1);
4937   STATIC_ASSERT(kIsIndirectStringTag == 0x1);
4938   return UncheckedCast<BoolT>(
4939       Word32And(instance_type, Int32Constant(kIsIndirectStringMask)));
4940 }
4941 
4942 TNode<BoolT> CodeStubAssembler::IsExternalStringInstanceType(
4943     SloppyTNode<Int32T> instance_type) {
4944   CSA_ASSERT(this, IsStringInstanceType(instance_type));
4945   return Word32Equal(
4946       Word32And(instance_type, Int32Constant(kStringRepresentationMask)),
4947       Int32Constant(kExternalStringTag));
4948 }
4949 
4950 TNode<BoolT> CodeStubAssembler::IsShortExternalStringInstanceType(
4951     SloppyTNode<Int32T> instance_type) {
4952   CSA_ASSERT(this, IsStringInstanceType(instance_type));
4953   STATIC_ASSERT(kShortExternalStringTag != 0);
4954   return IsSetWord32(instance_type, kShortExternalStringMask);
4955 }
4956 
4957 TNode<BoolT> CodeStubAssembler::IsJSReceiverInstanceType(
4958     SloppyTNode<Int32T> instance_type) {
4959   STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
4960   return Int32GreaterThanOrEqual(instance_type,
4961                                  Int32Constant(FIRST_JS_RECEIVER_TYPE));
4962 }
4963 
4964 TNode<BoolT> CodeStubAssembler::IsJSReceiverMap(SloppyTNode<Map> map) {
4965   return IsJSReceiverInstanceType(LoadMapInstanceType(map));
4966 }
4967 
4968 TNode<BoolT> CodeStubAssembler::IsJSReceiver(SloppyTNode<HeapObject> object) {
4969   return IsJSReceiverMap(LoadMap(object));
4970 }
4971 
4972 TNode<BoolT> CodeStubAssembler::IsNullOrJSReceiver(
4973     SloppyTNode<HeapObject> object) {
4974   return UncheckedCast<BoolT>(Word32Or(IsJSReceiver(object), IsNull(object)));
4975 }
4976 
4977 TNode<BoolT> CodeStubAssembler::IsNullOrUndefined(SloppyTNode<Object> value) {
4978   return UncheckedCast<BoolT>(Word32Or(IsUndefined(value), IsNull(value)));
4979 }
4980 
4981 TNode<BoolT> CodeStubAssembler::IsJSGlobalProxyInstanceType(
4982     SloppyTNode<Int32T> instance_type) {
4983   return InstanceTypeEqual(instance_type, JS_GLOBAL_PROXY_TYPE);
4984 }
4985 
4986 TNode<BoolT> CodeStubAssembler::IsJSObjectInstanceType(
4987     SloppyTNode<Int32T> instance_type) {
4988   STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
4989   return Int32GreaterThanOrEqual(instance_type,
4990                                  Int32Constant(FIRST_JS_OBJECT_TYPE));
4991 }
4992 
4993 TNode<BoolT> CodeStubAssembler::IsJSObjectMap(SloppyTNode<Map> map) {
4994   CSA_ASSERT(this, IsMap(map));
4995   return IsJSObjectInstanceType(LoadMapInstanceType(map));
4996 }
4997 
4998 TNode<BoolT> CodeStubAssembler::IsJSObject(SloppyTNode<HeapObject> object) {
4999   return IsJSObjectMap(LoadMap(object));
5000 }
5001 
5002 TNode<BoolT> CodeStubAssembler::IsJSPromiseMap(SloppyTNode<Map> map) {
5003   CSA_ASSERT(this, IsMap(map));
5004   return InstanceTypeEqual(LoadMapInstanceType(map), JS_PROMISE_TYPE);
5005 }
5006 
5007 TNode<BoolT> CodeStubAssembler::IsJSPromise(SloppyTNode<HeapObject> object) {
5008   return IsJSPromiseMap(LoadMap(object));
5009 }
5010 
5011 TNode<BoolT> CodeStubAssembler::IsJSProxy(SloppyTNode<HeapObject> object) {
5012   return HasInstanceType(object, JS_PROXY_TYPE);
5013 }
5014 
5015 TNode<BoolT> CodeStubAssembler::IsJSGlobalProxy(
5016     SloppyTNode<HeapObject> object) {
5017   return HasInstanceType(object, JS_GLOBAL_PROXY_TYPE);
5018 }
5019 
5020 TNode<BoolT> CodeStubAssembler::IsMap(SloppyTNode<HeapObject> map) {
5021   return IsMetaMap(LoadMap(map));
5022 }
5023 
5024 TNode<BoolT> CodeStubAssembler::IsJSValueInstanceType(
5025     SloppyTNode<Int32T> instance_type) {
5026   return InstanceTypeEqual(instance_type, JS_VALUE_TYPE);
5027 }
5028 
5029 TNode<BoolT> CodeStubAssembler::IsJSValue(SloppyTNode<HeapObject> object) {
5030   return IsJSValueMap(LoadMap(object));
5031 }
5032 
5033 TNode<BoolT> CodeStubAssembler::IsJSValueMap(SloppyTNode<Map> map) {
5034   return IsJSValueInstanceType(LoadMapInstanceType(map));
5035 }
5036 
5037 TNode<BoolT> CodeStubAssembler::IsJSArrayInstanceType(
5038     SloppyTNode<Int32T> instance_type) {
5039   return InstanceTypeEqual(instance_type, JS_ARRAY_TYPE);
5040 }
5041 
5042 TNode<BoolT> CodeStubAssembler::IsJSArray(SloppyTNode<HeapObject> object) {
5043   return IsJSArrayMap(LoadMap(object));
5044 }
5045 
5046 TNode<BoolT> CodeStubAssembler::IsJSArrayMap(SloppyTNode<Map> map) {
5047   return IsJSArrayInstanceType(LoadMapInstanceType(map));
5048 }
5049 
5050 TNode<BoolT> CodeStubAssembler::IsJSArrayIterator(
5051     SloppyTNode<HeapObject> object) {
5052   return HasInstanceType(object, JS_ARRAY_ITERATOR_TYPE);
5053 }
5054 
5055 TNode<BoolT> CodeStubAssembler::IsJSAsyncGeneratorObject(
5056     SloppyTNode<HeapObject> object) {
5057   return HasInstanceType(object, JS_ASYNC_GENERATOR_OBJECT_TYPE);
5058 }
5059 
5060 TNode<BoolT> CodeStubAssembler::IsContext(SloppyTNode<HeapObject> object) {
5061   Node* instance_type = LoadInstanceType(object);
5062   return UncheckedCast<BoolT>(Word32And(
5063       Int32GreaterThanOrEqual(instance_type, Int32Constant(FIRST_CONTEXT_TYPE)),
5064       Int32LessThanOrEqual(instance_type, Int32Constant(LAST_CONTEXT_TYPE))));
5065 }
5066 
5067 TNode<BoolT> CodeStubAssembler::IsFixedArray(SloppyTNode<HeapObject> object) {
5068   return HasInstanceType(object, FIXED_ARRAY_TYPE);
5069 }
5070 
5071 TNode<BoolT> CodeStubAssembler::IsFixedArraySubclass(
5072     SloppyTNode<HeapObject> object) {
5073   Node* instance_type = LoadInstanceType(object);
5074   return UncheckedCast<BoolT>(
5075       Word32And(Int32GreaterThanOrEqual(instance_type,
5076                                         Int32Constant(FIRST_FIXED_ARRAY_TYPE)),
5077                 Int32LessThanOrEqual(instance_type,
5078                                      Int32Constant(LAST_FIXED_ARRAY_TYPE))));
5079 }
5080 
5081 TNode<BoolT> CodeStubAssembler::IsNotWeakFixedArraySubclass(
5082     SloppyTNode<HeapObject> object) {
5083   Node* instance_type = LoadInstanceType(object);
5084   return UncheckedCast<BoolT>(Word32Or(
5085       Int32LessThan(instance_type, Int32Constant(FIRST_WEAK_FIXED_ARRAY_TYPE)),
5086       Int32GreaterThan(instance_type,
5087                        Int32Constant(LAST_WEAK_FIXED_ARRAY_TYPE))));
5088 }
5089 
5090 TNode<BoolT> CodeStubAssembler::IsPromiseCapability(
5091     SloppyTNode<HeapObject> object) {
5092   return HasInstanceType(object, PROMISE_CAPABILITY_TYPE);
5093 }
5094 
5095 TNode<BoolT> CodeStubAssembler::IsPropertyArray(
5096     SloppyTNode<HeapObject> object) {
5097   return HasInstanceType(object, PROPERTY_ARRAY_TYPE);
5098 }
5099 
5100 // This complicated check is due to elements oddities. If a smi array is empty
5101 // after Array.p.shift, it is replaced by the empty array constant. If it is
5102 // later filled with a double element, we try to grow it but pass in a double
5103 // elements kind. Usually this would cause a size mismatch (since the source
5104 // fixed array has HOLEY_ELEMENTS and destination has
5105 // HOLEY_DOUBLE_ELEMENTS), but we don't have to worry about it when the
5106 // source array is empty.
5107 // TODO(jgruber): It might be worth creating an empty_double_array constant to
5108 // simplify this case.
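//
// A rough JS-level sketch of that scenario (illustrative only; the names and
// literals below are not taken from this file):
//
//   const a = [1];   // smi elements
//   a.shift();       // backing store becomes the empty fixed array constant
//   a.push(1.5);     // grow request arrives with a double elements kind
//
// The grow step then copies from a FixedArray into a FixedDoubleArray, which
// is only safe because the source array is empty.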
5109 TNode<BoolT> CodeStubAssembler::IsFixedArrayWithKindOrEmpty(
5110     SloppyTNode<HeapObject> object, ElementsKind kind) {
5111   Label out(this);
5112   TVARIABLE(BoolT, var_result, Int32TrueConstant());
5113 
5114   GotoIf(IsFixedArrayWithKind(object, kind), &out);
5115 
5116   TNode<Smi> const length = LoadFixedArrayBaseLength(CAST(object));
5117   GotoIf(SmiEqual(length, SmiConstant(0)), &out);
5118 
5119   var_result = Int32FalseConstant();
5120   Goto(&out);
5121 
5122   BIND(&out);
5123   return var_result.value();
5124 }
5125 
5126 TNode<BoolT> CodeStubAssembler::IsFixedArrayWithKind(
5127     SloppyTNode<HeapObject> object, ElementsKind kind) {
5128   if (IsDoubleElementsKind(kind)) {
5129     return IsFixedDoubleArray(object);
5130   } else {
5131     DCHECK(IsSmiOrObjectElementsKind(kind));
5132     return IsFixedArraySubclass(object);
5133   }
5134 }
5135 
5136 TNode<BoolT> CodeStubAssembler::IsWeakCell(SloppyTNode<HeapObject> object) {
5137   CSA_ASSERT(this, IsStrongHeapObject(object));
5138   return IsWeakCellMap(LoadMap(object));
5139 }
5140 
5141 TNode<BoolT> CodeStubAssembler::IsBoolean(SloppyTNode<HeapObject> object) {
5142   return IsBooleanMap(LoadMap(object));
5143 }
5144 
5145 TNode<BoolT> CodeStubAssembler::IsPropertyCell(SloppyTNode<HeapObject> object) {
5146   return IsPropertyCellMap(LoadMap(object));
5147 }
5148 
5149 TNode<BoolT> CodeStubAssembler::IsAccessorInfo(SloppyTNode<HeapObject> object) {
5150   return IsAccessorInfoMap(LoadMap(object));
5151 }
5152 
5153 TNode<BoolT> CodeStubAssembler::IsAccessorPair(SloppyTNode<HeapObject> object) {
5154   return IsAccessorPairMap(LoadMap(object));
5155 }
5156 
5157 TNode<BoolT> CodeStubAssembler::IsAllocationSite(
5158     SloppyTNode<HeapObject> object) {
5159   return IsAllocationSiteMap(LoadMap(object));
5160 }
5161 
5162 TNode<BoolT> CodeStubAssembler::IsAnyHeapNumber(
5163     SloppyTNode<HeapObject> object) {
5164   return UncheckedCast<BoolT>(
5165       Word32Or(IsMutableHeapNumber(object), IsHeapNumber(object)));
5166 }
5167 
5168 TNode<BoolT> CodeStubAssembler::IsHeapNumber(SloppyTNode<HeapObject> object) {
5169   return IsHeapNumberMap(LoadMap(object));
5170 }
5171 
5172 TNode<BoolT> CodeStubAssembler::IsMutableHeapNumber(
5173     SloppyTNode<HeapObject> object) {
5174   return IsMutableHeapNumberMap(LoadMap(object));
5175 }
5176 
5177 TNode<BoolT> CodeStubAssembler::IsFeedbackCell(SloppyTNode<HeapObject> object) {
5178   return HasInstanceType(object, FEEDBACK_CELL_TYPE);
5179 }
5180 
5181 TNode<BoolT> CodeStubAssembler::IsFeedbackVector(
5182     SloppyTNode<HeapObject> object) {
5183   return IsFeedbackVectorMap(LoadMap(object));
5184 }
5185 
5186 TNode<BoolT> CodeStubAssembler::IsName(SloppyTNode<HeapObject> object) {
5187   return Int32LessThanOrEqual(LoadInstanceType(object),
5188                               Int32Constant(LAST_NAME_TYPE));
5189 }
5190 
5191 TNode<BoolT> CodeStubAssembler::IsString(SloppyTNode<HeapObject> object) {
5192   return IsStringInstanceType(LoadInstanceType(object));
5193 }
5194 
5195 TNode<BoolT> CodeStubAssembler::IsSymbolInstanceType(
5196     SloppyTNode<Int32T> instance_type) {
5197   return InstanceTypeEqual(instance_type, SYMBOL_TYPE);
5198 }
5199 
5200 TNode<BoolT> CodeStubAssembler::IsSymbol(SloppyTNode<HeapObject> object) {
5201   return IsSymbolMap(LoadMap(object));
5202 }
5203 
5204 TNode<BoolT> CodeStubAssembler::IsBigIntInstanceType(
5205     SloppyTNode<Int32T> instance_type) {
5206   return InstanceTypeEqual(instance_type, BIGINT_TYPE);
5207 }
5208 
5209 TNode<BoolT> CodeStubAssembler::IsBigInt(SloppyTNode<HeapObject> object) {
5210   return IsBigIntInstanceType(LoadInstanceType(object));
5211 }
5212 
5213 TNode<BoolT> CodeStubAssembler::IsPrimitiveInstanceType(
5214     SloppyTNode<Int32T> instance_type) {
5215   return Int32LessThanOrEqual(instance_type,
5216                               Int32Constant(LAST_PRIMITIVE_TYPE));
5217 }
5218 
5219 TNode<BoolT> CodeStubAssembler::IsPrivateSymbol(
5220     SloppyTNode<HeapObject> object) {
5221   return Select<BoolT>(
5222       IsSymbol(object),
5223       [=] {
5224         TNode<Symbol> symbol = CAST(object);
5225         TNode<Int32T> flags =
5226             SmiToInt32(LoadObjectField<Smi>(symbol, Symbol::kFlagsOffset));
5227         return IsSetWord32(flags, 1 << Symbol::kPrivateBit);
5228       },
5229       [=] { return Int32FalseConstant(); });
5230 }
5231 
5232 TNode<BoolT> CodeStubAssembler::IsNativeContext(
5233     SloppyTNode<HeapObject> object) {
5234   return WordEqual(LoadMap(object), LoadRoot(Heap::kNativeContextMapRootIndex));
5235 }
5236 
5237 TNode<BoolT> CodeStubAssembler::IsFixedDoubleArray(
5238     SloppyTNode<HeapObject> object) {
5239   return WordEqual(LoadMap(object), FixedDoubleArrayMapConstant());
5240 }
5241 
5242 TNode<BoolT> CodeStubAssembler::IsHashTable(SloppyTNode<HeapObject> object) {
5243   return HasInstanceType(object, HASH_TABLE_TYPE);
5244 }
5245 
5246 TNode<BoolT> CodeStubAssembler::IsDictionary(SloppyTNode<HeapObject> object) {
5247   return UncheckedCast<BoolT>(
5248       Word32Or(IsHashTable(object), IsNumberDictionary(object)));
5249 }
5250 
5251 TNode<BoolT> CodeStubAssembler::IsNumberDictionary(
5252     SloppyTNode<HeapObject> object) {
5253   return WordEqual(LoadMap(object),
5254                    LoadRoot(Heap::kNumberDictionaryMapRootIndex));
5255 }
5256 
5257 TNode<BoolT> CodeStubAssembler::IsJSGeneratorObject(
5258     SloppyTNode<HeapObject> object) {
5259   return HasInstanceType(object, JS_GENERATOR_OBJECT_TYPE);
5260 }
5261 
5262 TNode<BoolT> CodeStubAssembler::IsJSFunctionInstanceType(
5263     SloppyTNode<Int32T> instance_type) {
5264   return InstanceTypeEqual(instance_type, JS_FUNCTION_TYPE);
5265 }
5266 
5267 TNode<BoolT> CodeStubAssembler::IsJSFunction(SloppyTNode<HeapObject> object) {
5268   return IsJSFunctionMap(LoadMap(object));
5269 }
5270 
5271 TNode<BoolT> CodeStubAssembler::IsJSFunctionMap(SloppyTNode<Map> map) {
5272   return IsJSFunctionInstanceType(LoadMapInstanceType(map));
5273 }
5274 
5275 TNode<BoolT> CodeStubAssembler::IsJSTypedArray(SloppyTNode<HeapObject> object) {
5276   return HasInstanceType(object, JS_TYPED_ARRAY_TYPE);
5277 }
5278 
5279 TNode<BoolT> CodeStubAssembler::IsJSArrayBuffer(
5280     SloppyTNode<HeapObject> object) {
5281   return HasInstanceType(object, JS_ARRAY_BUFFER_TYPE);
5282 }
5283 
5284 TNode<BoolT> CodeStubAssembler::IsFixedTypedArray(
5285     SloppyTNode<HeapObject> object) {
5286   TNode<Int32T> instance_type = LoadInstanceType(object);
5287   return UncheckedCast<BoolT>(Word32And(
5288       Int32GreaterThanOrEqual(instance_type,
5289                               Int32Constant(FIRST_FIXED_TYPED_ARRAY_TYPE)),
5290       Int32LessThanOrEqual(instance_type,
5291                            Int32Constant(LAST_FIXED_TYPED_ARRAY_TYPE))));
5292 }
5293 
5294 TNode<BoolT> CodeStubAssembler::IsJSRegExp(SloppyTNode<HeapObject> object) {
5295   return HasInstanceType(object, JS_REGEXP_TYPE);
5296 }
5297 
5298 TNode<BoolT> CodeStubAssembler::IsNumber(SloppyTNode<Object> object) {
5299   return Select<BoolT>(TaggedIsSmi(object), [=] { return Int32TrueConstant(); },
5300                        [=] { return IsHeapNumber(CAST(object)); });
5301 }
5302 
5303 TNode<BoolT> CodeStubAssembler::IsNumeric(SloppyTNode<Object> object) {
5304   return Select<BoolT>(
5305       TaggedIsSmi(object), [=] { return Int32TrueConstant(); },
5306       [=] {
5307         return UncheckedCast<BoolT>(
5308             Word32Or(IsHeapNumber(CAST(object)), IsBigInt(CAST(object))));
5309       });
5310 }
5311 
5312 TNode<BoolT> CodeStubAssembler::IsNumberNormalized(SloppyTNode<Number> number) {
5313   TVARIABLE(BoolT, var_result, Int32TrueConstant());
5314   Label out(this);
5315 
5316   GotoIf(TaggedIsSmi(number), &out);
5317 
5318   TNode<Float64T> value = LoadHeapNumberValue(CAST(number));
5319   TNode<Float64T> smi_min =
5320       Float64Constant(static_cast<double>(Smi::kMinValue));
5321   TNode<Float64T> smi_max =
5322       Float64Constant(static_cast<double>(Smi::kMaxValue));
5323 
5324   GotoIf(Float64LessThan(value, smi_min), &out);
5325   GotoIf(Float64GreaterThan(value, smi_max), &out);
5326   GotoIfNot(Float64Equal(value, value), &out);  // NaN.
5327 
5328   var_result = Int32FalseConstant();
5329   Goto(&out);
5330 
5331   BIND(&out);
5332   return var_result.value();
5333 }
5334 
5335 TNode<BoolT> CodeStubAssembler::IsNumberPositive(SloppyTNode<Number> number) {
5336   TNode<Float64T> float_zero = Float64Constant(0.);
5337   return Select<BoolT>(TaggedIsSmi(number),
5338                        [=] { return TaggedIsPositiveSmi(number); },
5339                        [=] {
5340                          TNode<Float64T> v = LoadHeapNumberValue(CAST(number));
5341                          return Float64GreaterThanOrEqual(v, float_zero);
5342                        });
5343 }
5344 
5345 TNode<BoolT> CodeStubAssembler::IsNumberArrayIndex(SloppyTNode<Number> number) {
5346   TVARIABLE(BoolT, var_result, Int32TrueConstant());
5347 
5348   Label check_upper_bound(this), check_is_integer(this), out(this),
5349       return_false(this);
5350 
5351   GotoIfNumberGreaterThanOrEqual(number, NumberConstant(0), &check_upper_bound);
5352   Goto(&return_false);
5353 
5354   BIND(&check_upper_bound);
5355   GotoIfNumberGreaterThanOrEqual(number, NumberConstant(kMaxUInt32),
5356                                  &return_false);
5357   Goto(&check_is_integer);
5358 
5359   BIND(&check_is_integer);
5360   GotoIf(TaggedIsSmi(number), &out);
5361   // Check that the HeapNumber is a valid uint32
5362   TNode<Float64T> value = LoadHeapNumberValue(CAST(number));
5363   TNode<Uint32T> int_value = ChangeFloat64ToUint32(value);
5364   GotoIf(Float64Equal(value, ChangeUint32ToFloat64(int_value)), &out);
5365   Goto(&return_false);
5366 
5367   BIND(&return_false);
5368   var_result = Int32FalseConstant();
5369   Goto(&out);
5370 
5371   BIND(&out);
5372   return var_result.value();
5373 }
5374 
5375 Node* CodeStubAssembler::FixedArraySizeDoesntFitInNewSpace(Node* element_count,
5376                                                            int base_size,
5377                                                            ParameterMode mode) {
5378   int max_newspace_elements =
5379       (kMaxRegularHeapObjectSize - base_size) / kPointerSize;
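  // In other words, if element_count exceeds this bound, the backing store
  // would be larger than a regular new-space object; callers use the result
  // to fall back to an allocation outside new space (descriptive note only,
  // no concrete byte values assumed here).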
5380   return IntPtrOrSmiGreaterThan(
5381       element_count, IntPtrOrSmiConstant(max_newspace_elements, mode), mode);
5382 }
5383 
5384 TNode<Int32T> CodeStubAssembler::StringCharCodeAt(SloppyTNode<String> string,
5385                                                   SloppyTNode<IntPtrT> index) {
5386   CSA_ASSERT(this, IsString(string));
5387 
5388   CSA_ASSERT(this, IntPtrGreaterThanOrEqual(index, IntPtrConstant(0)));
5389   CSA_ASSERT(this, IntPtrLessThan(index, LoadStringLengthAsWord(string)));
5390 
5391   TVARIABLE(Int32T, var_result);
5392 
5393   Label return_result(this), if_runtime(this, Label::kDeferred),
5394       if_stringistwobyte(this), if_stringisonebyte(this);
5395 
5396   ToDirectStringAssembler to_direct(state(), string);
5397   to_direct.TryToDirect(&if_runtime);
5398   Node* const offset = IntPtrAdd(index, to_direct.offset());
5399   Node* const instance_type = to_direct.instance_type();
5400 
5401   Node* const string_data = to_direct.PointerToData(&if_runtime);
5402 
5403   // Check if the {string} is a TwoByteSeqString or a OneByteSeqString.
5404   Branch(IsOneByteStringInstanceType(instance_type), &if_stringisonebyte,
5405          &if_stringistwobyte);
5406 
5407   BIND(&if_stringisonebyte);
5408   {
5409     var_result =
5410         UncheckedCast<Int32T>(Load(MachineType::Uint8(), string_data, offset));
5411     Goto(&return_result);
5412   }
5413 
5414   BIND(&if_stringistwobyte);
5415   {
5416     var_result =
5417         UncheckedCast<Int32T>(Load(MachineType::Uint16(), string_data,
5418                                    WordShl(offset, IntPtrConstant(1))));
5419     Goto(&return_result);
5420   }
5421 
5422   BIND(&if_runtime);
5423   {
5424     Node* result = CallRuntime(Runtime::kStringCharCodeAt, NoContextConstant(),
5425                                string, SmiTag(index));
5426     var_result = SmiToInt32(result);
5427     Goto(&return_result);
5428   }
5429 
5430   BIND(&return_result);
5431   return var_result.value();
5432 }
5433 
5434 TNode<String> CodeStubAssembler::StringFromSingleCharCode(TNode<Int32T> code) {
5435   VARIABLE(var_result, MachineRepresentation::kTagged);
5436 
5437   // Check if the {code} is a one-byte char code.
5438   Label if_codeisonebyte(this), if_codeistwobyte(this, Label::kDeferred),
5439       if_done(this);
5440   Branch(Int32LessThanOrEqual(code, Int32Constant(String::kMaxOneByteCharCode)),
5441          &if_codeisonebyte, &if_codeistwobyte);
5442   BIND(&if_codeisonebyte);
5443   {
5444     // Load the isolate wide single character string cache.
5445     Node* cache = LoadRoot(Heap::kSingleCharacterStringCacheRootIndex);
5446     Node* code_index = ChangeUint32ToWord(code);
5447 
5448     // Check if we have an entry for the {code} in the single character string
5449     // cache already.
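    // For example, the first request for char code 0x61 allocates the
    // one-character string "a" and stores it at cache index 0x61; later
    // requests for the same code reuse that entry (an illustrative
    // description of the cache handling below).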
5450     Label if_entryisundefined(this, Label::kDeferred),
5451         if_entryisnotundefined(this);
5452     Node* entry = LoadFixedArrayElement(cache, code_index);
5453     Branch(IsUndefined(entry), &if_entryisundefined, &if_entryisnotundefined);
5454 
5455     BIND(&if_entryisundefined);
5456     {
5457       // Allocate a new SeqOneByteString for {code} and store it in the {cache}.
5458       Node* result = AllocateSeqOneByteString(1);
5459       StoreNoWriteBarrier(
5460           MachineRepresentation::kWord8, result,
5461           IntPtrConstant(SeqOneByteString::kHeaderSize - kHeapObjectTag), code);
5462       StoreFixedArrayElement(cache, code_index, result);
5463       var_result.Bind(result);
5464       Goto(&if_done);
5465     }
5466 
5467     BIND(&if_entryisnotundefined);
5468     {
5469       // Return the entry from the {cache}.
5470       var_result.Bind(entry);
5471       Goto(&if_done);
5472     }
5473   }
5474 
5475   BIND(&if_codeistwobyte);
5476   {
5477     // Allocate a new SeqTwoByteString for {code}.
5478     Node* result = AllocateSeqTwoByteString(1);
5479     StoreNoWriteBarrier(
5480         MachineRepresentation::kWord16, result,
5481         IntPtrConstant(SeqTwoByteString::kHeaderSize - kHeapObjectTag), code);
5482     var_result.Bind(result);
5483     Goto(&if_done);
5484   }
5485 
5486   BIND(&if_done);
5487   CSA_ASSERT(this, IsString(var_result.value()));
5488   return CAST(var_result.value());
5489 }
5490 
5491 // A wrapper around CopyStringCharacters which determines the correct string
5492 // encoding, allocates a corresponding sequential string, and then copies the
5493 // given character range using CopyStringCharacters.
5494 // |from_string| must be a sequential string.
5495 // 0 <= |from_index| <= |from_index| + |character_count| < from_string.length.
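//
// A hypothetical call site (argument values chosen purely for illustration):
// copying characters [2, 5) of a sequential one-byte string |src| would be
//   AllocAndCopyStringCharacters(src, src_instance_type,
//                                IntPtrConstant(2), SmiConstant(3));
// which returns a freshly allocated three-character SeqOneByteString.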
5496 TNode<String> CodeStubAssembler::AllocAndCopyStringCharacters(
5497     Node* from, Node* from_instance_type, TNode<IntPtrT> from_index,
5498     TNode<Smi> character_count) {
5499   Label end(this), one_byte_sequential(this), two_byte_sequential(this);
5500   TVARIABLE(String, var_result);
5501 
5502   Branch(IsOneByteStringInstanceType(from_instance_type), &one_byte_sequential,
5503          &two_byte_sequential);
5504 
5505   // The subject string is a sequential one-byte string.
5506   BIND(&one_byte_sequential);
5507   {
5508     TNode<String> result =
5509         AllocateSeqOneByteString(NoContextConstant(), character_count);
5510     CopyStringCharacters(from, result, from_index, IntPtrConstant(0),
5511                          SmiUntag(character_count), String::ONE_BYTE_ENCODING,
5512                          String::ONE_BYTE_ENCODING);
5513     var_result = result;
5514     Goto(&end);
5515   }
5516 
5517   // The subject string is a sequential two-byte string.
5518   BIND(&two_byte_sequential);
5519   {
5520     TNode<String> result =
5521         AllocateSeqTwoByteString(NoContextConstant(), character_count);
5522     CopyStringCharacters(from, result, from_index, IntPtrConstant(0),
5523                          SmiUntag(character_count), String::TWO_BYTE_ENCODING,
5524                          String::TWO_BYTE_ENCODING);
5525     var_result = result;
5526     Goto(&end);
5527   }
5528 
5529   BIND(&end);
5530   return var_result.value();
5531 }
5532 
5533 TNode<String> CodeStubAssembler::SubString(TNode<String> string,
5534                                            TNode<IntPtrT> from,
5535                                            TNode<IntPtrT> to) {
5536   TVARIABLE(String, var_result);
5537   ToDirectStringAssembler to_direct(state(), string);
5538   Label end(this), runtime(this);
5539 
5540   TNode<IntPtrT> const substr_length = IntPtrSub(to, from);
5541   TNode<IntPtrT> const string_length = LoadStringLengthAsWord(string);
5542 
5543   // Begin dispatching based on substring length.
5544 
5545   Label original_string_or_invalid_length(this);
5546   GotoIf(UintPtrGreaterThanOrEqual(substr_length, string_length),
5547          &original_string_or_invalid_length);
5548 
5549   // A real substring (substr_length < string_length).
5550 
5551   Label single_char(this);
5552   GotoIf(IntPtrEqual(substr_length, IntPtrConstant(1)), &single_char);
5553 
5554   // TODO(jgruber): Add an additional case for substring of length == 0?
5555 
5556   // Deal with different string types: update the index if necessary
5557   // and extract the underlying string.
5558 
5559   TNode<String> direct_string = to_direct.TryToDirect(&runtime);
5560   TNode<IntPtrT> offset = IntPtrAdd(from, to_direct.offset());
5561   Node* const instance_type = to_direct.instance_type();
5562 
5563   // The subject string can only be external or sequential string of either
5564   // encoding at this point.
5565   Label external_string(this);
5566   {
5567     if (FLAG_string_slices) {
5568       Label next(this);
5569 
5570       // Short slice.  Copy instead of slicing.
5571       GotoIf(IntPtrLessThan(substr_length,
5572                             IntPtrConstant(SlicedString::kMinLength)),
5573              &next);
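      // As a rough illustration, assuming SlicedString::kMinLength keeps its
      // long-standing value of 13: "abcdef".substring(1, 4) copies the three
      // characters, while a 100-character substring of a long string is
      // represented as a SlicedString pointing into its parent.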
5574 
5575       // Allocate new sliced string.
5576 
5577       Counters* counters = isolate()->counters();
5578       IncrementCounter(counters->sub_string_native(), 1);
5579 
5580       Label one_byte_slice(this), two_byte_slice(this);
5581       Branch(IsOneByteStringInstanceType(to_direct.instance_type()),
5582              &one_byte_slice, &two_byte_slice);
5583 
5584       BIND(&one_byte_slice);
5585       {
5586         var_result = AllocateSlicedOneByteString(SmiTag(substr_length),
5587                                                  direct_string, SmiTag(offset));
5588         Goto(&end);
5589       }
5590 
5591       BIND(&two_byte_slice);
5592       {
5593         var_result = AllocateSlicedTwoByteString(SmiTag(substr_length),
5594                                                  direct_string, SmiTag(offset));
5595         Goto(&end);
5596       }
5597 
5598       BIND(&next);
5599     }
5600 
5601     // The subject string can only be external or sequential string of either
5602     // encoding at this point.
5603     GotoIf(to_direct.is_external(), &external_string);
5604 
5605     var_result = AllocAndCopyStringCharacters(direct_string, instance_type,
5606                                               offset, SmiTag(substr_length));
5607 
5608     Counters* counters = isolate()->counters();
5609     IncrementCounter(counters->sub_string_native(), 1);
5610 
5611     Goto(&end);
5612   }
5613 
5614   // Handle external string.
5615   BIND(&external_string);
5616   {
5617     Node* const fake_sequential_string = to_direct.PointerToString(&runtime);
5618 
5619     var_result = AllocAndCopyStringCharacters(
5620         fake_sequential_string, instance_type, offset, SmiTag(substr_length));
5621 
5622     Counters* counters = isolate()->counters();
5623     IncrementCounter(counters->sub_string_native(), 1);
5624 
5625     Goto(&end);
5626   }
5627 
5628   // Substrings of length 1 are generated through CharCodeAt and FromCharCode.
5629   BIND(&single_char);
5630   {
5631     TNode<Int32T> char_code = StringCharCodeAt(string, from);
5632     var_result = StringFromSingleCharCode(char_code);
5633     Goto(&end);
5634   }
5635 
5636   BIND(&original_string_or_invalid_length);
5637   {
5638     CSA_ASSERT(this, IntPtrEqual(substr_length, string_length));
5639 
5640     // Equal length - check if {from, to} == {0, str.length}.
5641     GotoIf(UintPtrGreaterThan(from, IntPtrConstant(0)), &runtime);
5642 
5643     // Return the original string (substr_length == string_length).
5644 
5645     Counters* counters = isolate()->counters();
5646     IncrementCounter(counters->sub_string_native(), 1);
5647 
5648     var_result = string;
5649     Goto(&end);
5650   }
5651 
5652   // Fall back to a runtime call.
5653   BIND(&runtime);
5654   {
5655     var_result =
5656         CAST(CallRuntime(Runtime::kStringSubstring, NoContextConstant(), string,
5657                          SmiTag(from), SmiTag(to)));
5658     Goto(&end);
5659   }
5660 
5661   BIND(&end);
5662   return var_result.value();
5663 }
5664 
5665 ToDirectStringAssembler::ToDirectStringAssembler(
5666     compiler::CodeAssemblerState* state, Node* string, Flags flags)
5667     : CodeStubAssembler(state),
5668       var_string_(this, MachineRepresentation::kTagged, string),
5669       var_instance_type_(this, MachineRepresentation::kWord32),
5670       var_offset_(this, MachineType::PointerRepresentation()),
5671       var_is_external_(this, MachineRepresentation::kWord32),
5672       flags_(flags) {
5673   CSA_ASSERT(this, TaggedIsNotSmi(string));
5674   CSA_ASSERT(this, IsString(string));
5675 
5676   var_string_.Bind(string);
5677   var_offset_.Bind(IntPtrConstant(0));
5678   var_instance_type_.Bind(LoadInstanceType(string));
5679   var_is_external_.Bind(Int32Constant(0));
5680 }
5681 
5682 TNode<String> ToDirectStringAssembler::TryToDirect(Label* if_bailout) {
5683   VariableList vars({&var_string_, &var_offset_, &var_instance_type_}, zone());
5684   Label dispatch(this, vars);
5685   Label if_iscons(this);
5686   Label if_isexternal(this);
5687   Label if_issliced(this);
5688   Label if_isthin(this);
5689   Label out(this);
5690 
5691   Branch(IsSequentialStringInstanceType(var_instance_type_.value()), &out,
5692          &dispatch);
5693 
5694   // Dispatch based on string representation.
5695   BIND(&dispatch);
5696   {
5697     int32_t values[] = {
5698         kSeqStringTag,    kConsStringTag, kExternalStringTag,
5699         kSlicedStringTag, kThinStringTag,
5700     };
5701     Label* labels[] = {
5702         &out, &if_iscons, &if_isexternal, &if_issliced, &if_isthin,
5703     };
5704     STATIC_ASSERT(arraysize(values) == arraysize(labels));
5705 
5706     Node* const representation = Word32And(
5707         var_instance_type_.value(), Int32Constant(kStringRepresentationMask));
5708     Switch(representation, if_bailout, values, labels, arraysize(values));
5709   }
5710 
5711   // Cons string.  Check whether it is flat, then fetch first part.
5712   // Flat cons strings have an empty second part.
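  // For example, a ConsString whose contents were already flattened into its
  // first part looks like {first: "ab", second: ""}; such a string can be
  // unwrapped to "ab" here, while a non-flat cons must bail out.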
5713   BIND(&if_iscons);
5714   {
5715     Node* const string = var_string_.value();
5716     GotoIfNot(IsEmptyString(LoadObjectField(string, ConsString::kSecondOffset)),
5717               if_bailout);
5718 
5719     Node* const lhs = LoadObjectField(string, ConsString::kFirstOffset);
5720     var_string_.Bind(lhs);
5721     var_instance_type_.Bind(LoadInstanceType(lhs));
5722 
5723     Goto(&dispatch);
5724   }
5725 
5726   // Sliced string. Fetch parent and correct start index by offset.
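  // E.g. if this SlicedString starts at offset 3 within its parent and the
  // walk has already accumulated an offset of 2, the direct offset becomes
  // 2 + 3 = 5 into the parent string.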
5727   BIND(&if_issliced);
5728   {
5729     if (!FLAG_string_slices || (flags_ & kDontUnpackSlicedStrings)) {
5730       Goto(if_bailout);
5731     } else {
5732       Node* const string = var_string_.value();
5733       Node* const sliced_offset =
5734           LoadAndUntagObjectField(string, SlicedString::kOffsetOffset);
5735       var_offset_.Bind(IntPtrAdd(var_offset_.value(), sliced_offset));
5736 
5737       Node* const parent = LoadObjectField(string, SlicedString::kParentOffset);
5738       var_string_.Bind(parent);
5739       var_instance_type_.Bind(LoadInstanceType(parent));
5740 
5741       Goto(&dispatch);
5742     }
5743   }
5744 
5745   // Thin string. Fetch the actual string.
5746   BIND(&if_isthin);
5747   {
5748     Node* const string = var_string_.value();
5749     Node* const actual_string =
5750         LoadObjectField(string, ThinString::kActualOffset);
5751     Node* const actual_instance_type = LoadInstanceType(actual_string);
5752 
5753     var_string_.Bind(actual_string);
5754     var_instance_type_.Bind(actual_instance_type);
5755 
5756     Goto(&dispatch);
5757   }
5758 
5759   // External string.
5760   BIND(&if_isexternal);
5761   var_is_external_.Bind(Int32Constant(1));
5762   Goto(&out);
5763 
5764   BIND(&out);
5765   return CAST(var_string_.value());
5766 }
5767 
5768 Node* ToDirectStringAssembler::TryToSequential(StringPointerKind ptr_kind,
5769                                                Label* if_bailout) {
5770   CHECK(ptr_kind == PTR_TO_DATA || ptr_kind == PTR_TO_STRING);
5771 
5772   VARIABLE(var_result, MachineType::PointerRepresentation());
5773   Label out(this), if_issequential(this), if_isexternal(this, Label::kDeferred);
5774   Branch(is_external(), &if_isexternal, &if_issequential);
5775 
5776   BIND(&if_issequential);
5777   {
5778     STATIC_ASSERT(SeqOneByteString::kHeaderSize ==
5779                   SeqTwoByteString::kHeaderSize);
5780     Node* result = BitcastTaggedToWord(var_string_.value());
5781     if (ptr_kind == PTR_TO_DATA) {
5782       result = IntPtrAdd(result, IntPtrConstant(SeqOneByteString::kHeaderSize -
5783                                                 kHeapObjectTag));
5784     }
5785     var_result.Bind(result);
5786     Goto(&out);
5787   }
5788 
5789   BIND(&if_isexternal);
5790   {
5791     GotoIf(IsShortExternalStringInstanceType(var_instance_type_.value()),
5792            if_bailout);
5793 
5794     Node* const string = var_string_.value();
5795     Node* result = LoadObjectField(string, ExternalString::kResourceDataOffset,
5796                                    MachineType::Pointer());
5797     if (ptr_kind == PTR_TO_STRING) {
5798       result = IntPtrSub(result, IntPtrConstant(SeqOneByteString::kHeaderSize -
5799                                                 kHeapObjectTag));
5800     }
5801     var_result.Bind(result);
5802     Goto(&out);
5803   }
5804 
5805   BIND(&out);
5806   return var_result.value();
5807 }
5808 
5809 void CodeStubAssembler::BranchIfCanDerefIndirectString(Node* string,
5810                                                        Node* instance_type,
5811                                                        Label* can_deref,
5812                                                        Label* cannot_deref) {
5813   CSA_ASSERT(this, IsString(string));
5814   Node* representation =
5815       Word32And(instance_type, Int32Constant(kStringRepresentationMask));
5816   GotoIf(Word32Equal(representation, Int32Constant(kThinStringTag)), can_deref);
5817   GotoIf(Word32NotEqual(representation, Int32Constant(kConsStringTag)),
5818          cannot_deref);
5819   // Cons string.
5820   Node* rhs = LoadObjectField(string, ConsString::kSecondOffset);
5821   GotoIf(IsEmptyString(rhs), can_deref);
5822   Goto(cannot_deref);
5823 }
5824 
5825 Node* CodeStubAssembler::DerefIndirectString(TNode<String> string,
5826                                              TNode<Int32T> instance_type,
5827                                              Label* cannot_deref) {
5828   Label deref(this);
5829   BranchIfCanDerefIndirectString(string, instance_type, &deref, cannot_deref);
5830   BIND(&deref);
5831   STATIC_ASSERT(ThinString::kActualOffset == ConsString::kFirstOffset);
5832   return LoadObjectField(string, ThinString::kActualOffset);
5833 }
5834 
5835 void CodeStubAssembler::DerefIndirectString(Variable* var_string,
5836                                             Node* instance_type) {
5837 #ifdef DEBUG
5838   Label can_deref(this), cannot_deref(this);
5839   BranchIfCanDerefIndirectString(var_string->value(), instance_type, &can_deref,
5840                                  &cannot_deref);
5841   BIND(&cannot_deref);
5842   DebugBreak();  // Should be able to dereference string.
5843   Goto(&can_deref);
5844   BIND(&can_deref);
5845 #endif  // DEBUG
5846 
5847   STATIC_ASSERT(ThinString::kActualOffset == ConsString::kFirstOffset);
5848   var_string->Bind(
5849       LoadObjectField(var_string->value(), ThinString::kActualOffset));
5850 }
5851 
5852 void CodeStubAssembler::MaybeDerefIndirectString(Variable* var_string,
5853                                                  Node* instance_type,
5854                                                  Label* did_deref,
5855                                                  Label* cannot_deref) {
5856   Label deref(this);
5857   BranchIfCanDerefIndirectString(var_string->value(), instance_type, &deref,
5858                                  cannot_deref);
5859 
5860   BIND(&deref);
5861   {
5862     DerefIndirectString(var_string, instance_type);
5863     Goto(did_deref);
5864   }
5865 }
5866 
5867 void CodeStubAssembler::MaybeDerefIndirectStrings(Variable* var_left,
5868                                                   Node* left_instance_type,
5869                                                   Variable* var_right,
5870                                                   Node* right_instance_type,
5871                                                   Label* did_something) {
5872   Label did_nothing_left(this), did_something_left(this),
5873       didnt_do_anything(this);
5874   MaybeDerefIndirectString(var_left, left_instance_type, &did_something_left,
5875                            &did_nothing_left);
5876 
5877   BIND(&did_something_left);
5878   {
5879     MaybeDerefIndirectString(var_right, right_instance_type, did_something,
5880                              did_something);
5881   }
5882 
5883   BIND(&did_nothing_left);
5884   {
5885     MaybeDerefIndirectString(var_right, right_instance_type, did_something,
5886                              &didnt_do_anything);
5887   }
5888 
5889   BIND(&didnt_do_anything);
5890   // Fall through if neither string was an indirect string.
5891 }
5892 
5893 TNode<String> CodeStubAssembler::StringAdd(Node* context, TNode<String> left,
5894                                            TNode<String> right,
5895                                            AllocationFlags flags) {
5896   TVARIABLE(String, result);
5897   Label check_right(this), runtime(this, Label::kDeferred), cons(this),
5898       done(this, &result), done_native(this, &result);
5899   Counters* counters = isolate()->counters();
5900 
5901   TNode<Smi> left_length = LoadStringLengthAsSmi(left);
5902   GotoIf(SmiNotEqual(SmiConstant(0), left_length), &check_right);
5903   result = right;
5904   Goto(&done_native);
5905 
5906   BIND(&check_right);
5907   TNode<Smi> right_length = LoadStringLengthAsSmi(right);
5908   GotoIf(SmiNotEqual(SmiConstant(0), right_length), &cons);
5909   result = left;
5910   Goto(&done_native);
5911 
5912   BIND(&cons);
5913   {
5914     TNode<Smi> new_length = SmiAdd(left_length, right_length);
5915 
5916     // If new length is greater than String::kMaxLength, goto runtime to
5917     // throw. Note: we also need to invalidate the string length protector, so
5918     // can't just throw here directly.
5919     GotoIf(SmiGreaterThan(new_length, SmiConstant(String::kMaxLength)),
5920            &runtime);
5921 
5922     TVARIABLE(String, var_left, left);
5923     TVARIABLE(String, var_right, right);
5924     Variable* input_vars[2] = {&var_left, &var_right};
5925     Label non_cons(this, 2, input_vars);
5926     Label slow(this, Label::kDeferred);
5927     GotoIf(SmiLessThan(new_length, SmiConstant(ConsString::kMinLength)),
5928            &non_cons);
5929 
5930     result = NewConsString(context, new_length, var_left.value(),
5931                            var_right.value(), flags);
5932     Goto(&done_native);
5933 
5934     BIND(&non_cons);
5935 
5936     Comment("Full string concatenate");
5937     Node* left_instance_type = LoadInstanceType(var_left.value());
5938     Node* right_instance_type = LoadInstanceType(var_right.value());
5939     // Compute intersection and difference of instance types.
5940 
5941     Node* ored_instance_types =
5942         Word32Or(left_instance_type, right_instance_type);
5943     Node* xored_instance_types =
5944         Word32Xor(left_instance_type, right_instance_type);
5945 
5946     // Check if both strings have the same encoding and both are sequential.
5947     GotoIf(IsSetWord32(xored_instance_types, kStringEncodingMask), &runtime);
5948     GotoIf(IsSetWord32(ored_instance_types, kStringRepresentationMask), &slow);
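    // For example, when both inputs are sequential one-byte strings, the xor
    // leaves kStringEncodingMask clear (identical encodings) and the or
    // leaves kStringRepresentationMask clear (kSeqStringTag is zero), so
    // neither of the two bailouts above is taken.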
5949 
5950     TNode<IntPtrT> word_left_length = SmiUntag(left_length);
5951     TNode<IntPtrT> word_right_length = SmiUntag(right_length);
5952 
5953     Label two_byte(this);
5954     GotoIf(Word32Equal(Word32And(ored_instance_types,
5955                                  Int32Constant(kStringEncodingMask)),
5956                        Int32Constant(kTwoByteStringTag)),
5957            &two_byte);
5958     // One-byte sequential string case
5959     result = AllocateSeqOneByteString(context, new_length);
5960     CopyStringCharacters(var_left.value(), result.value(), IntPtrConstant(0),
5961                          IntPtrConstant(0), word_left_length,
5962                          String::ONE_BYTE_ENCODING, String::ONE_BYTE_ENCODING);
5963     CopyStringCharacters(var_right.value(), result.value(), IntPtrConstant(0),
5964                          word_left_length, word_right_length,
5965                          String::ONE_BYTE_ENCODING, String::ONE_BYTE_ENCODING);
5966     Goto(&done_native);
5967 
5968     BIND(&two_byte);
5969     {
5970       // Two-byte sequential string case
5971       result = AllocateSeqTwoByteString(context, new_length);
5972       CopyStringCharacters(var_left.value(), result.value(), IntPtrConstant(0),
5973                            IntPtrConstant(0), word_left_length,
5974                            String::TWO_BYTE_ENCODING,
5975                            String::TWO_BYTE_ENCODING);
5976       CopyStringCharacters(var_right.value(), result.value(), IntPtrConstant(0),
5977                            word_left_length, word_right_length,
5978                            String::TWO_BYTE_ENCODING,
5979                            String::TWO_BYTE_ENCODING);
5980       Goto(&done_native);
5981     }
5982 
5983     BIND(&slow);
5984     {
5985       // Try to unwrap indirect strings, restart the above attempt on success.
5986       MaybeDerefIndirectStrings(&var_left, left_instance_type, &var_right,
5987                                 right_instance_type, &non_cons);
5988       Goto(&runtime);
5989     }
5990   }
5991   BIND(&runtime);
5992   {
5993     result = CAST(CallRuntime(Runtime::kStringAdd, context, left, right));
5994     Goto(&done);
5995   }
5996 
5997   BIND(&done_native);
5998   {
5999     IncrementCounter(counters->string_add_native(), 1);
6000     Goto(&done);
6001   }
6002 
6003   BIND(&done);
6004   return result.value();
6005 }
6006 
6007 TNode<String> CodeStubAssembler::StringFromSingleCodePoint(
6008     TNode<Int32T> codepoint, UnicodeEncoding encoding) {
6009   VARIABLE(var_result, MachineRepresentation::kTagged, EmptyStringConstant());
6010 
6011   Label if_isword16(this), if_isword32(this), return_result(this);
6012 
6013   Branch(Uint32LessThan(codepoint, Int32Constant(0x10000)), &if_isword16,
6014          &if_isword32);
6015 
6016   BIND(&if_isword16);
6017   {
6018     var_result.Bind(StringFromSingleCharCode(codepoint));
6019     Goto(&return_result);
6020   }
6021 
6022   BIND(&if_isword32);
6023   {
6024     switch (encoding) {
6025       case UnicodeEncoding::UTF16:
6026         break;
6027       case UnicodeEncoding::UTF32: {
6028         // Convert UTF32 to UTF16 code units and store them as a 32-bit word.
6029         Node* lead_offset = Int32Constant(0xD800 - (0x10000 >> 10));
6030 
6031         // lead = (codepoint >> 10) + LEAD_OFFSET
6032         Node* lead =
6033             Int32Add(Word32Shr(codepoint, Int32Constant(10)), lead_offset);
6034 
6035         // trail = (codepoint & 0x3FF) + 0xDC00;
6036         Node* trail = Int32Add(Word32And(codepoint, Int32Constant(0x3FF)),
6037                                Int32Constant(0xDC00));
6038 
6039         // codepoint = (trail << 16) | lead;
6040         codepoint = Signed(Word32Or(Word32Shl(trail, Int32Constant(16)), lead));
6041         break;
6042       }
6043     }
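    // Worked example of the surrogate-pair math above (illustrative only),
    // for the code point U+1F600:
    //   lead  = (0x1F600 >> 10) + (0xD800 - 0x40) = 0x7D  + 0xD7C0 = 0xD83D
    //   trail = (0x1F600 & 0x3FF) + 0xDC00        = 0x200 + 0xDC00 = 0xDE00
    //   word  = (0xDE00 << 16) | 0xD83D
    // On a little-endian target the 32-bit store below thus writes the lead
    // surrogate into the first UTF-16 code unit, as required.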
6044 
6045     Node* value = AllocateSeqTwoByteString(2);
6046     StoreNoWriteBarrier(
6047         MachineRepresentation::kWord32, value,
6048         IntPtrConstant(SeqTwoByteString::kHeaderSize - kHeapObjectTag),
6049         codepoint);
6050     var_result.Bind(value);
6051     Goto(&return_result);
6052   }
6053 
6054   BIND(&return_result);
6055   return CAST(var_result.value());
6056 }
6057 
6058 TNode<Number> CodeStubAssembler::StringToNumber(TNode<String> input) {
6059   Label runtime(this, Label::kDeferred);
6060   Label end(this);
6061 
6062   TVARIABLE(Number, var_result);
6063 
6064   // Check if string has a cached array index.
6065   TNode<Uint32T> hash = LoadNameHashField(input);
6066   GotoIf(IsSetWord32(hash, Name::kDoesNotContainCachedArrayIndexMask),
6067          &runtime);
6068 
6069   var_result =
6070       SmiTag(Signed(DecodeWordFromWord32<String::ArrayIndexValueBits>(hash)));
6071   Goto(&end);
6072 
6073   BIND(&runtime);
6074   {
6075     var_result =
6076         CAST(CallRuntime(Runtime::kStringToNumber, NoContextConstant(), input));
6077     Goto(&end);
6078   }
6079 
6080   BIND(&end);
6081   return var_result.value();
6082 }
6083 
6084 TNode<String> CodeStubAssembler::NumberToString(TNode<Number> input) {
6085   TVARIABLE(String, result);
6086   Label runtime(this, Label::kDeferred), smi(this), done(this, &result);
6087 
6088   // Load the number string cache.
6089   Node* number_string_cache = LoadRoot(Heap::kNumberStringCacheRootIndex);
6090 
6091   // Make the hash mask from the length of the number string cache. It
6092   // contains two elements (number and string) for each cache entry.
6093   // TODO(ishell): cleanup mask handling.
6094   Node* mask =
6095       BitcastTaggedToWord(LoadFixedArrayBaseLength(number_string_cache));
6096   TNode<IntPtrT> one = IntPtrConstant(1);
6097   mask = IntPtrSub(mask, one);
6098 
6099   GotoIf(TaggedIsSmi(input), &smi);
6100 
6101   TNode<HeapNumber> heap_number_input = CAST(input);
6102 
6103   // Make a hash from the two 32-bit values of the double.
6104   TNode<Int32T> low =
6105       LoadObjectField<Int32T>(heap_number_input, HeapNumber::kValueOffset);
6106   TNode<Int32T> high = LoadObjectField<Int32T>(
6107       heap_number_input, HeapNumber::kValueOffset + kIntSize);
6108   TNode<Word32T> hash = Word32Xor(low, high);
6109   TNode<WordT> word_hash = WordShl(ChangeInt32ToIntPtr(hash), one);
6110   TNode<WordT> index =
6111       WordAnd(word_hash, WordSar(mask, SmiShiftBitsConstant()));
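  // For illustration, assuming the cache length is a power of two (as it is
  // in practice), the index computed above is equivalent to
  //   uint32_t hash  = low ^ high;                 // halves of the double
  //   size_t   index = (hash * 2) & (length - 1);  // 2 * (hash % (length / 2))
  // so the key lives at |index| and the cached string at |index| + 1, which is
  // what the extra kPointerSize offset below loads.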
6112 
6113   // Cache entry's key must be a heap number
6114   Node* number_key = LoadFixedArrayElement(number_string_cache, index);
6115   GotoIf(TaggedIsSmi(number_key), &runtime);
6116   GotoIfNot(IsHeapNumber(number_key), &runtime);
6117 
6118   // Cache entry's key must match the heap number value we're looking for.
6119   Node* low_compare = LoadObjectField(number_key, HeapNumber::kValueOffset,
6120                                       MachineType::Int32());
6121   Node* high_compare = LoadObjectField(
6122       number_key, HeapNumber::kValueOffset + kIntSize, MachineType::Int32());
6123   GotoIfNot(Word32Equal(low, low_compare), &runtime);
6124   GotoIfNot(Word32Equal(high, high_compare), &runtime);
6125 
6126   // Heap number match, return value from cache entry.
6127   IncrementCounter(isolate()->counters()->number_to_string_native(), 1);
6128   result =
6129       CAST(LoadFixedArrayElement(number_string_cache, index, kPointerSize));
6130   Goto(&done);
6131 
6132   BIND(&runtime);
6133   {
6134     // No cache entry, go to the runtime.
6135     result = CAST(CallRuntime(Runtime::kNumberToStringSkipCache,
6136                               NoContextConstant(), input));
6137   }
6138   Goto(&done);
6139 
6140   BIND(&smi);
6141   {
6142     // Load the smi key, make sure it matches the smi we're looking for.
6143     Node* smi_index = BitcastWordToTagged(
6144         WordAnd(WordShl(BitcastTaggedToWord(input), one), mask));
6145     Node* smi_key = LoadFixedArrayElement(number_string_cache, smi_index, 0,
6146                                           SMI_PARAMETERS);
6147     GotoIf(WordNotEqual(smi_key, input), &runtime);
6148 
6149     // Smi match, return value from cache entry.
6150     IncrementCounter(isolate()->counters()->number_to_string_native(), 1);
6151     result = CAST(LoadFixedArrayElement(number_string_cache, smi_index,
6152                                         kPointerSize, SMI_PARAMETERS));
6153     Goto(&done);
6154   }
6155 
6156   BIND(&done);
6157   return result.value();
6158 }
6159 
6160 TNode<Name> CodeStubAssembler::ToName(SloppyTNode<Context> context,
6161                                       SloppyTNode<Object> value) {
6162   Label end(this);
6163   TVARIABLE(Name, var_result);
6164 
6165   Label is_number(this);
6166   GotoIf(TaggedIsSmi(value), &is_number);
6167 
6168   Label not_name(this);
6169   TNode<Int32T> value_instance_type = LoadInstanceType(CAST(value));
6170   STATIC_ASSERT(FIRST_NAME_TYPE == FIRST_TYPE);
6171   GotoIf(Int32GreaterThan(value_instance_type, Int32Constant(LAST_NAME_TYPE)),
6172          &not_name);
6173 
6174   var_result = CAST(value);
6175   Goto(&end);
6176 
6177   BIND(&is_number);
6178   {
6179     var_result = CAST(CallBuiltin(Builtins::kNumberToString, context, value));
6180     Goto(&end);
6181   }
6182 
6183   BIND(&not_name);
6184   {
6185     GotoIf(InstanceTypeEqual(value_instance_type, HEAP_NUMBER_TYPE),
6186            &is_number);
6187 
6188     Label not_oddball(this);
6189     GotoIfNot(InstanceTypeEqual(value_instance_type, ODDBALL_TYPE),
6190               &not_oddball);
6191 
6192     var_result = LoadObjectField<String>(CAST(value), Oddball::kToStringOffset);
6193     Goto(&end);
6194 
6195     BIND(&not_oddball);
6196     {
6197       var_result = CAST(CallRuntime(Runtime::kToName, context, value));
6198       Goto(&end);
6199     }
6200   }
6201 
6202   BIND(&end);
6203   return var_result.value();
6204 }
6205 
6206 Node* CodeStubAssembler::NonNumberToNumberOrNumeric(
6207     Node* context, Node* input, Object::Conversion mode,
6208     BigIntHandling bigint_handling) {
6209   CSA_ASSERT(this, Word32BinaryNot(TaggedIsSmi(input)));
6210   CSA_ASSERT(this, Word32BinaryNot(IsHeapNumber(input)));
6211 
6212   // We might need to loop once here due to ToPrimitive conversions.
6213   VARIABLE(var_input, MachineRepresentation::kTagged, input);
6214   VARIABLE(var_result, MachineRepresentation::kTagged);
6215   Label loop(this, &var_input);
6216   Label end(this);
6217   Goto(&loop);
6218   BIND(&loop);
6219   {
6220     // Load the current {input} value (known to be a HeapObject).
6221     Node* input = var_input.value();
6222 
6223     // Dispatch on the {input} instance type.
6224     Node* input_instance_type = LoadInstanceType(input);
6225     Label if_inputisstring(this), if_inputisoddball(this),
6226         if_inputisbigint(this), if_inputisreceiver(this, Label::kDeferred),
6227         if_inputisother(this, Label::kDeferred);
6228     GotoIf(IsStringInstanceType(input_instance_type), &if_inputisstring);
6229     GotoIf(IsBigIntInstanceType(input_instance_type), &if_inputisbigint);
6230     GotoIf(InstanceTypeEqual(input_instance_type, ODDBALL_TYPE),
6231            &if_inputisoddball);
6232     Branch(IsJSReceiverInstanceType(input_instance_type), &if_inputisreceiver,
6233            &if_inputisother);
6234 
6235     BIND(&if_inputisstring);
6236     {
6237       // The {input} is a String, use the fast stub to convert it to a Number.
6238       TNode<String> string_input = CAST(input);
6239       var_result.Bind(StringToNumber(string_input));
6240       Goto(&end);
6241     }
6242 
6243     BIND(&if_inputisbigint);
6244     if (mode == Object::Conversion::kToNumeric) {
6245       var_result.Bind(input);
6246       Goto(&end);
6247     } else {
6248       DCHECK_EQ(mode, Object::Conversion::kToNumber);
6249       if (bigint_handling == BigIntHandling::kThrow) {
6250         Goto(&if_inputisother);
6251       } else {
6252         DCHECK_EQ(bigint_handling, BigIntHandling::kConvertToNumber);
6253         var_result.Bind(CallRuntime(Runtime::kBigIntToNumber, context, input));
6254         Goto(&end);
6255       }
6256     }
6257 
6258     BIND(&if_inputisoddball);
6259     {
6260       // The {input} is an Oddball; we just need to load its Number value.
6261       var_result.Bind(LoadObjectField(input, Oddball::kToNumberOffset));
6262       Goto(&end);
6263     }
6264 
6265     BIND(&if_inputisreceiver);
6266     {
6267       // The {input} is a JSReceiver, we need to convert it to a Primitive first
6268       // using the ToPrimitive type conversion, preferably yielding a Number.
6269       Callable callable = CodeFactory::NonPrimitiveToPrimitive(
6270           isolate(), ToPrimitiveHint::kNumber);
6271       Node* result = CallStub(callable, context, input);
6272 
6273       // Check if the {result} is already a Number/Numeric.
6274       Label if_done(this), if_notdone(this);
6275       Branch(mode == Object::Conversion::kToNumber ? IsNumber(result)
6276                                                    : IsNumeric(result),
6277              &if_done, &if_notdone);
6278 
6279       BIND(&if_done);
6280       {
6281         // The ToPrimitive conversion already gave us a Number/Numeric, so we're
6282         // done.
6283         var_result.Bind(result);
6284         Goto(&end);
6285       }
6286 
6287       BIND(&if_notdone);
6288       {
6289         // We now have a Primitive {result}, but it's not yet a Number/Numeric.
6290         var_input.Bind(result);
6291         Goto(&loop);
6292       }
6293     }
6294 
6295     BIND(&if_inputisother);
6296     {
6297       // The {input} is something else (e.g. Symbol), let the runtime figure
6298       // out the correct exception.
6299       // Note: We cannot tail call to the runtime here, as js-to-wasm
6300       // trampolines also use this code currently, and they declare all
6301       // outgoing parameters as untagged, while we would push a tagged
6302       // object here.
6303       auto function_id = mode == Object::Conversion::kToNumber
6304                              ? Runtime::kToNumber
6305                              : Runtime::kToNumeric;
6306       var_result.Bind(CallRuntime(function_id, context, input));
6307       Goto(&end);
6308     }
6309   }
6310 
6311   BIND(&end);
6312   if (mode == Object::Conversion::kToNumeric) {
6313     CSA_ASSERT(this, IsNumeric(var_result.value()));
6314   } else {
6315     DCHECK_EQ(mode, Object::Conversion::kToNumber);
6316     CSA_ASSERT(this, IsNumber(var_result.value()));
6317   }
6318   return var_result.value();
6319 }
6320 
6321 TNode<Number> CodeStubAssembler::NonNumberToNumber(
6322     SloppyTNode<Context> context, SloppyTNode<HeapObject> input,
6323     BigIntHandling bigint_handling) {
6324   return CAST(NonNumberToNumberOrNumeric(
6325       context, input, Object::Conversion::kToNumber, bigint_handling));
6326 }
6327 
6328 TNode<Numeric> CodeStubAssembler::NonNumberToNumeric(
6329     SloppyTNode<Context> context, SloppyTNode<HeapObject> input) {
6330   Node* result = NonNumberToNumberOrNumeric(context, input,
6331                                             Object::Conversion::kToNumeric);
6332   CSA_SLOW_ASSERT(this, IsNumeric(result));
6333   return UncheckedCast<Numeric>(result);
6334 }
6335 
6336 TNode<Number> CodeStubAssembler::ToNumber_Inline(SloppyTNode<Context> context,
6337                                                  SloppyTNode<Object> input) {
6338   TVARIABLE(Number, var_result);
6339   Label end(this), not_smi(this, Label::kDeferred);
6340 
6341   GotoIfNot(TaggedIsSmi(input), &not_smi);
6342   var_result = CAST(input);
6343   Goto(&end);
6344 
6345   BIND(&not_smi);
6346   {
6347     var_result =
6348         Select<Number>(IsHeapNumber(CAST(input)), [=] { return CAST(input); },
6349                        [=] {
6350                          return CAST(CallBuiltin(Builtins::kNonNumberToNumber,
6351                                                  context, input));
6352                        });
6353     Goto(&end);
6354   }
6355 
6356   BIND(&end);
6357   return var_result.value();
6358 }
6359 
6360 TNode<Number> CodeStubAssembler::ToNumber(SloppyTNode<Context> context,
6361                                           SloppyTNode<Object> input,
6362                                           BigIntHandling bigint_handling) {
6363   TVARIABLE(Number, var_result);
6364   Label end(this);
6365 
6366   Label not_smi(this, Label::kDeferred);
6367   GotoIfNot(TaggedIsSmi(input), &not_smi);
6368   TNode<Smi> input_smi = CAST(input);
6369   var_result = input_smi;
6370   Goto(&end);
6371 
6372   BIND(&not_smi);
6373   {
6374     Label not_heap_number(this, Label::kDeferred);
6375     TNode<HeapObject> input_ho = CAST(input);
6376     GotoIfNot(IsHeapNumber(input_ho), &not_heap_number);
6377 
6378     TNode<HeapNumber> input_hn = CAST(input_ho);
6379     var_result = input_hn;
6380     Goto(&end);
6381 
6382     BIND(&not_heap_number);
6383     {
6384       var_result = NonNumberToNumber(context, input_ho, bigint_handling);
6385       Goto(&end);
6386     }
6387   }
6388 
6389   BIND(&end);
6390   return var_result.value();
6391 }
6392 
6393 TNode<BigInt> CodeStubAssembler::ToBigInt(SloppyTNode<Context> context,
6394                                           SloppyTNode<Object> input) {
6395   TVARIABLE(BigInt, var_result);
6396   Label if_bigint(this), done(this), if_throw(this);
6397 
6398   GotoIf(TaggedIsSmi(input), &if_throw);
6399   GotoIf(IsBigInt(CAST(input)), &if_bigint);
6400   var_result = CAST(CallRuntime(Runtime::kToBigInt, context, input));
6401   Goto(&done);
6402 
6403   BIND(&if_bigint);
6404   var_result = CAST(input);
6405   Goto(&done);
6406 
6407   BIND(&if_throw);
6408   ThrowTypeError(context, MessageTemplate::kBigIntFromObject, input);
6409 
6410   BIND(&done);
6411   return var_result.value();
6412 }
6413 
6414 void CodeStubAssembler::TaggedToNumeric(Node* context, Node* value, Label* done,
6415                                         Variable* var_numeric) {
6416   TaggedToNumeric(context, value, done, var_numeric, nullptr);
6417 }
6418 
6419 void CodeStubAssembler::TaggedToNumericWithFeedback(Node* context, Node* value,
6420                                                     Label* done,
6421                                                     Variable* var_numeric,
6422                                                     Variable* var_feedback) {
6423   DCHECK_NOT_NULL(var_feedback);
6424   TaggedToNumeric(context, value, done, var_numeric, var_feedback);
6425 }
6426 
6427 void CodeStubAssembler::TaggedToNumeric(Node* context, Node* value, Label* done,
6428                                         Variable* var_numeric,
6429                                         Variable* var_feedback) {
6430   var_numeric->Bind(value);
6431   Label if_smi(this), if_heapnumber(this), if_bigint(this), if_oddball(this);
6432   GotoIf(TaggedIsSmi(value), &if_smi);
6433   Node* map = LoadMap(value);
6434   GotoIf(IsHeapNumberMap(map), &if_heapnumber);
6435   Node* instance_type = LoadMapInstanceType(map);
6436   GotoIf(IsBigIntInstanceType(instance_type), &if_bigint);
6437 
6438   // {value} is not a Numeric yet.
6439   GotoIf(Word32Equal(instance_type, Int32Constant(ODDBALL_TYPE)), &if_oddball);
6440   var_numeric->Bind(CallBuiltin(Builtins::kNonNumberToNumeric, context, value));
6441   OverwriteFeedback(var_feedback, BinaryOperationFeedback::kAny);
6442   Goto(done);
6443 
6444   BIND(&if_smi);
6445   OverwriteFeedback(var_feedback, BinaryOperationFeedback::kSignedSmall);
6446   Goto(done);
6447 
6448   BIND(&if_heapnumber);
6449   OverwriteFeedback(var_feedback, BinaryOperationFeedback::kNumber);
6450   Goto(done);
6451 
6452   BIND(&if_bigint);
6453   OverwriteFeedback(var_feedback, BinaryOperationFeedback::kBigInt);
6454   Goto(done);
6455 
6456   BIND(&if_oddball);
6457   OverwriteFeedback(var_feedback, BinaryOperationFeedback::kNumberOrOddball);
6458   var_numeric->Bind(LoadObjectField(value, Oddball::kToNumberOffset));
6459   Goto(done);
6460 }
6461 
6462 // ES#sec-touint32
6463 TNode<Number> CodeStubAssembler::ToUint32(SloppyTNode<Context> context,
6464                                           SloppyTNode<Object> input) {
6465   Node* const float_zero = Float64Constant(0.0);
6466   Node* const float_two_32 = Float64Constant(static_cast<double>(1ULL << 32));
6467 
6468   Label out(this);
6469 
6470   VARIABLE(var_result, MachineRepresentation::kTagged, input);
6471 
6472   // Early exit for positive smis.
6473   {
6474     // TODO(jgruber): This branch and the recheck below can be removed once we
6475     // have a ToNumber with multiple exits.
6476     Label next(this, Label::kDeferred);
6477     Branch(TaggedIsPositiveSmi(input), &out, &next);
6478     BIND(&next);
6479   }
6480 
6481   Node* const number = ToNumber(context, input);
6482   var_result.Bind(number);
6483 
6484   // Perhaps we have a positive smi now.
6485   {
6486     Label next(this, Label::kDeferred);
6487     Branch(TaggedIsPositiveSmi(number), &out, &next);
6488     BIND(&next);
6489   }
6490 
6491   Label if_isnegativesmi(this), if_isheapnumber(this);
6492   Branch(TaggedIsSmi(number), &if_isnegativesmi, &if_isheapnumber);
6493 
6494   BIND(&if_isnegativesmi);
6495   {
6496     Node* const uint32_value = SmiToInt32(number);
6497     Node* float64_value = ChangeUint32ToFloat64(uint32_value);
6498     var_result.Bind(AllocateHeapNumberWithValue(float64_value));
6499     Goto(&out);
6500   }
6501 
6502   BIND(&if_isheapnumber);
6503   {
6504     Label return_zero(this);
6505     Node* const value = LoadHeapNumberValue(number);
6506 
6507     {
6508       // +-0.
6509       Label next(this);
6510       Branch(Float64Equal(value, float_zero), &return_zero, &next);
6511       BIND(&next);
6512     }
6513 
6514     {
6515       // NaN.
6516       Label next(this);
6517       Branch(Float64Equal(value, value), &next, &return_zero);
6518       BIND(&next);
6519     }
6520 
6521     {
6522       // +Infinity.
6523       Label next(this);
6524       Node* const positive_infinity =
6525           Float64Constant(std::numeric_limits<double>::infinity());
6526       Branch(Float64Equal(value, positive_infinity), &return_zero, &next);
6527       BIND(&next);
6528     }
6529 
6530     {
6531       // -Infinity.
6532       Label next(this);
6533       Node* const negative_infinity =
6534           Float64Constant(-1.0 * std::numeric_limits<double>::infinity());
6535       Branch(Float64Equal(value, negative_infinity), &return_zero, &next);
6536       BIND(&next);
6537     }
6538 
6539     // * Let int be the mathematical value that is the same sign as number and
6540     //   whose magnitude is floor(abs(number)).
6541     // * Let int32bit be int modulo 2^32.
6542     // * Return int32bit.
6543     {
6544       Node* x = Float64Trunc(value);
6545       x = Float64Mod(x, float_two_32);
6546       x = Float64Add(x, float_two_32);
6547       x = Float64Mod(x, float_two_32);
6548 
6549       Node* const result = ChangeFloat64ToTagged(x);
6550       var_result.Bind(result);
6551       Goto(&out);
6552     }
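    // For illustration, the block above computes, in plain C++ terms:
    //   double x = std::trunc(value);
    //   x = std::fmod(x, 4294967296.0);
    //   x = std::fmod(x + 4294967296.0, 4294967296.0);  // map into [0, 2^32)
    // e.g. value == -1.5: trunc gives -1.0 and the result is 4294967295.0.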
6553 
6554     BIND(&return_zero);
6555     {
6556       var_result.Bind(SmiConstant(0));
6557       Goto(&out);
6558     }
6559   }
6560 
6561   BIND(&out);
6562   return CAST(var_result.value());
6563 }
6564 
6565 TNode<String> CodeStubAssembler::ToString(SloppyTNode<Context> context,
6566                                           SloppyTNode<Object> input) {
6567   Label is_number(this);
6568   Label runtime(this, Label::kDeferred), done(this);
6569   VARIABLE(result, MachineRepresentation::kTagged);
6570   GotoIf(TaggedIsSmi(input), &is_number);
6571 
6572   TNode<Map> input_map = LoadMap(CAST(input));
6573   TNode<Int32T> input_instance_type = LoadMapInstanceType(input_map);
6574 
6575   result.Bind(input);
6576   GotoIf(IsStringInstanceType(input_instance_type), &done);
6577 
6578   Label not_heap_number(this);
6579   Branch(IsHeapNumberMap(input_map), &is_number, &not_heap_number);
6580 
6581   BIND(&is_number);
6582   TNode<Number> number_input = CAST(input);
6583   result.Bind(NumberToString(number_input));
6584   Goto(&done);
6585 
6586   BIND(&not_heap_number);
6587   {
6588     GotoIfNot(InstanceTypeEqual(input_instance_type, ODDBALL_TYPE), &runtime);
6589     result.Bind(LoadObjectField(CAST(input), Oddball::kToStringOffset));
6590     Goto(&done);
6591   }
6592 
6593   BIND(&runtime);
6594   {
6595     result.Bind(CallRuntime(Runtime::kToString, context, input));
6596     Goto(&done);
6597   }
6598 
6599   BIND(&done);
6600   return CAST(result.value());
6601 }
6602 
6603 TNode<String> CodeStubAssembler::ToString_Inline(SloppyTNode<Context> context,
6604                                                  SloppyTNode<Object> input) {
6605   VARIABLE(var_result, MachineRepresentation::kTagged, input);
6606   Label stub_call(this, Label::kDeferred), out(this);
6607 
6608   GotoIf(TaggedIsSmi(input), &stub_call);
6609   Branch(IsString(CAST(input)), &out, &stub_call);
6610 
6611   BIND(&stub_call);
6612   var_result.Bind(CallBuiltin(Builtins::kToString, context, input));
6613   Goto(&out);
6614 
6615   BIND(&out);
6616   return CAST(var_result.value());
6617 }
6618 
6619 Node* CodeStubAssembler::JSReceiverToPrimitive(Node* context, Node* input) {
6620   Label if_isreceiver(this, Label::kDeferred), if_isnotreceiver(this);
6621   VARIABLE(result, MachineRepresentation::kTagged);
6622   Label done(this, &result);
6623 
6624   BranchIfJSReceiver(input, &if_isreceiver, &if_isnotreceiver);
6625 
6626   BIND(&if_isreceiver);
6627   {
6628     // Convert {input} to a primitive first, passing the Number hint.
6629     Callable callable = CodeFactory::NonPrimitiveToPrimitive(isolate());
6630     result.Bind(CallStub(callable, context, input));
6631     Goto(&done);
6632   }
6633 
6634   BIND(&if_isnotreceiver);
6635   {
6636     result.Bind(input);
6637     Goto(&done);
6638   }
6639 
6640   BIND(&done);
6641   return result.value();
6642 }
6643 
6644 TNode<JSReceiver> CodeStubAssembler::ToObject(SloppyTNode<Context> context,
6645                                               SloppyTNode<Object> input) {
6646   return CAST(CallBuiltin(Builtins::kToObject, context, input));
6647 }
6648 
6649 TNode<Smi> CodeStubAssembler::ToSmiIndex(TNode<Object> input,
6650                                          TNode<Context> context,
6651                                          Label* range_error) {
6652   TVARIABLE(Smi, result);
6653   Label check_undefined(this), return_zero(this), defined(this),
6654       negative_check(this), done(this);
6655 
6656   GotoIfNot(TaggedIsSmi(input), &check_undefined);
6657   result = CAST(input);
6658   Goto(&negative_check);
6659 
6660   BIND(&check_undefined);
6661   Branch(IsUndefined(input), &return_zero, &defined);
6662 
6663   BIND(&defined);
6664   TNode<Number> integer_input =
6665       CAST(CallBuiltin(Builtins::kToInteger_TruncateMinusZero, context, input));
6666   GotoIfNot(TaggedIsSmi(integer_input), range_error);
6667   result = CAST(integer_input);
6668   Goto(&negative_check);
6669 
6670   BIND(&negative_check);
6671   Branch(SmiLessThan(result.value(), SmiConstant(0)), range_error, &done);
6672 
6673   BIND(&return_zero);
6674   result = SmiConstant(0);
6675   Goto(&done);
6676 
6677   BIND(&done);
6678   return result.value();
6679 }
6680 
6681 TNode<Smi> CodeStubAssembler::ToSmiLength(TNode<Object> input,
6682                                           TNode<Context> context,
6683                                           Label* range_error) {
6684   TVARIABLE(Smi, result);
6685   Label to_integer(this), negative_check(this),
6686       heap_number_negative_check(this), return_zero(this), done(this);
6687 
6688   GotoIfNot(TaggedIsSmi(input), &to_integer);
6689   result = CAST(input);
6690   Goto(&negative_check);
6691 
6692   BIND(&to_integer);
6693   {
6694     TNode<Number> integer_input = CAST(
6695         CallBuiltin(Builtins::kToInteger_TruncateMinusZero, context, input));
6696     GotoIfNot(TaggedIsSmi(integer_input), &heap_number_negative_check);
6697     result = CAST(integer_input);
6698     Goto(&negative_check);
6699 
6700     // integer_input can still be a negative HeapNumber here.
6701     BIND(&heap_number_negative_check);
6702     TNode<HeapNumber> heap_number_input = CAST(integer_input);
6703     Branch(IsTrue(CallBuiltin(Builtins::kLessThan, context, heap_number_input,
6704                               SmiConstant(0))),
6705            &return_zero, range_error);
6706   }
6707 
6708   BIND(&negative_check);
6709   Branch(SmiLessThan(result.value(), SmiConstant(0)), &return_zero, &done);
6710 
6711   BIND(&return_zero);
6712   result = SmiConstant(0);
6713   Goto(&done);
6714 
6715   BIND(&done);
6716   return result.value();
6717 }
6718 
6719 TNode<Number> CodeStubAssembler::ToLength_Inline(SloppyTNode<Context> context,
6720                                                  SloppyTNode<Object> input) {
6721   TNode<Smi> smi_zero = SmiConstant(0);
6722   return Select<Number>(
6723       TaggedIsSmi(input), [=] { return SmiMax(CAST(input), smi_zero); },
6724       [=] { return CAST(CallBuiltin(Builtins::kToLength, context, input)); });
6725 }
6726 
6727 TNode<Number> CodeStubAssembler::ToInteger_Inline(
6728     SloppyTNode<Context> context, SloppyTNode<Object> input,
6729     ToIntegerTruncationMode mode) {
6730   Builtins::Name builtin = (mode == kNoTruncation)
6731                                ? Builtins::kToInteger
6732                                : Builtins::kToInteger_TruncateMinusZero;
6733   return Select<Number>(
6734       TaggedIsSmi(input), [=] { return CAST(input); },
6735       [=] { return CAST(CallBuiltin(builtin, context, input)); });
6736 }
6737 
6738 TNode<Number> CodeStubAssembler::ToInteger(SloppyTNode<Context> context,
6739                                            SloppyTNode<Object> input,
6740                                            ToIntegerTruncationMode mode) {
6741   // We might need to loop once for ToNumber conversion.
6742   TVARIABLE(Object, var_arg, input);
6743   Label loop(this, &var_arg), out(this);
6744   Goto(&loop);
6745   BIND(&loop);
6746   {
6747     // Shared entry points.
6748     Label return_zero(this, Label::kDeferred);
6749 
6750     // Load the current {arg} value.
6751     TNode<Object> arg = var_arg.value();
6752 
6753     // Check if {arg} is a Smi.
6754     GotoIf(TaggedIsSmi(arg), &out);
6755 
6756     // Check if {arg} is a HeapNumber.
6757     Label if_argisheapnumber(this),
6758         if_argisnotheapnumber(this, Label::kDeferred);
6759     Branch(IsHeapNumber(CAST(arg)), &if_argisheapnumber,
6760            &if_argisnotheapnumber);
6761 
6762     BIND(&if_argisheapnumber);
6763     {
6764       TNode<HeapNumber> arg_hn = CAST(arg);
6765       // Load the floating-point value of {arg}.
6766       Node* arg_value = LoadHeapNumberValue(arg_hn);
6767 
6768       // Check if {arg} is NaN.
6769       GotoIfNot(Float64Equal(arg_value, arg_value), &return_zero);
6770 
6771       // Truncate {arg} towards zero.
6772       TNode<Float64T> value = Float64Trunc(arg_value);
6773 
6774       if (mode == kTruncateMinusZero) {
6775         // Truncate -0.0 to 0.
6776         GotoIf(Float64Equal(value, Float64Constant(0.0)), &return_zero);
6777       }
6778 
6779       var_arg = ChangeFloat64ToTagged(value);
6780       Goto(&out);
6781     }
6782 
6783     BIND(&if_argisnotheapnumber);
6784     {
6785       // Need to convert {arg} to a Number first.
6786       var_arg = UncheckedCast<Object>(
6787           CallBuiltin(Builtins::kNonNumberToNumber, context, arg));
6788       Goto(&loop);
6789     }
6790 
6791     BIND(&return_zero);
6792     var_arg = SmiConstant(0);
6793     Goto(&out);
6794   }
6795 
6796   BIND(&out);
6797   if (mode == kTruncateMinusZero) {
6798     CSA_ASSERT(this, IsNumberNormalized(CAST(var_arg.value())));
6799   }
6800   return CAST(var_arg.value());
6801 }
6802 
6803 TNode<Uint32T> CodeStubAssembler::DecodeWord32(SloppyTNode<Word32T> word32,
6804                                                uint32_t shift, uint32_t mask) {
6805   return UncheckedCast<Uint32T>(Word32Shr(
6806       Word32And(word32, Int32Constant(mask)), static_cast<int>(shift)));
6807 }
6808 
6809 TNode<UintPtrT> CodeStubAssembler::DecodeWord(SloppyTNode<WordT> word,
6810                                               uint32_t shift, uint32_t mask) {
6811   return Unsigned(
6812       WordShr(WordAnd(word, IntPtrConstant(mask)), static_cast<int>(shift)));
6813 }
6814 
6815 Node* CodeStubAssembler::UpdateWord(Node* word, Node* value, uint32_t shift,
6816                                     uint32_t mask) {
6817   Node* encoded_value = WordShl(value, static_cast<int>(shift));
6818   Node* inverted_mask = IntPtrConstant(~static_cast<intptr_t>(mask));
6819   // Ensure the {value} fits fully in the mask.
6820   CSA_ASSERT(this, WordEqual(WordAnd(encoded_value, inverted_mask),
6821                              IntPtrConstant(0)));
6822   return WordOr(WordAnd(word, inverted_mask), encoded_value);
6823 }
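// For illustration, DecodeWord and UpdateWord implement the usual
// mask-and-shift bit-field accessors; for a field covered by |mask| and
// starting at bit |shift|:
//   decoded = (word & mask) >> shift;             // DecodeWord
//   updated = (word & ~mask) | (value << shift);  // UpdateWord
// UpdateWord additionally asserts that |value| << |shift| stays within the
// mask, i.e. that the new value actually fits in the field.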
6824 
6825 void CodeStubAssembler::SetCounter(StatsCounter* counter, int value) {
6826   if (FLAG_native_code_counters && counter->Enabled()) {
6827     Node* counter_address =
6828         ExternalConstant(ExternalReference::Create(counter));
6829     StoreNoWriteBarrier(MachineRepresentation::kWord32, counter_address,
6830                         Int32Constant(value));
6831   }
6832 }
6833 
6834 void CodeStubAssembler::IncrementCounter(StatsCounter* counter, int delta) {
6835   DCHECK_GT(delta, 0);
6836   if (FLAG_native_code_counters && counter->Enabled()) {
6837     Node* counter_address =
6838         ExternalConstant(ExternalReference::Create(counter));
6839     Node* value = Load(MachineType::Int32(), counter_address);
6840     value = Int32Add(value, Int32Constant(delta));
6841     StoreNoWriteBarrier(MachineRepresentation::kWord32, counter_address, value);
6842   }
6843 }
6844 
6845 void CodeStubAssembler::DecrementCounter(StatsCounter* counter, int delta) {
6846   DCHECK_GT(delta, 0);
6847   if (FLAG_native_code_counters && counter->Enabled()) {
6848     Node* counter_address =
6849         ExternalConstant(ExternalReference::Create(counter));
6850     Node* value = Load(MachineType::Int32(), counter_address);
6851     value = Int32Sub(value, Int32Constant(delta));
6852     StoreNoWriteBarrier(MachineRepresentation::kWord32, counter_address, value);
6853   }
6854 }
6855 
6856 void CodeStubAssembler::Increment(Variable* variable, int value,
6857                                   ParameterMode mode) {
6858   DCHECK_IMPLIES(mode == INTPTR_PARAMETERS,
6859                  variable->rep() == MachineType::PointerRepresentation());
6860   DCHECK_IMPLIES(mode == SMI_PARAMETERS,
6861                  variable->rep() == MachineRepresentation::kTagged ||
6862                      variable->rep() == MachineRepresentation::kTaggedSigned);
6863   variable->Bind(IntPtrOrSmiAdd(variable->value(),
6864                                 IntPtrOrSmiConstant(value, mode), mode));
6865 }
6866 
6867 void CodeStubAssembler::Use(Label* label) {
6868   GotoIf(Word32Equal(Int32Constant(0), Int32Constant(1)), label);
6869 }
6870 
6871 void CodeStubAssembler::TryToName(Node* key, Label* if_keyisindex,
6872                                   Variable* var_index, Label* if_keyisunique,
6873                                   Variable* var_unique, Label* if_bailout,
6874                                   Label* if_notinternalized) {
6875   DCHECK_EQ(MachineType::PointerRepresentation(), var_index->rep());
6876   DCHECK_EQ(MachineRepresentation::kTagged, var_unique->rep());
6877   Comment("TryToName");
6878 
6879   Label if_hascachedindex(this), if_keyisnotindex(this), if_thinstring(this),
6880       if_keyisother(this, Label::kDeferred);
6881   // Handle Smi and HeapNumber keys.
6882   var_index->Bind(TryToIntptr(key, &if_keyisnotindex));
6883   Goto(if_keyisindex);
6884 
6885   BIND(&if_keyisnotindex);
6886   Node* key_map = LoadMap(key);
6887   var_unique->Bind(key);
6888   // Symbols are unique.
6889   GotoIf(IsSymbolMap(key_map), if_keyisunique);
6890   Node* key_instance_type = LoadMapInstanceType(key_map);
6891   // Miss if |key| is not a String.
6892   STATIC_ASSERT(FIRST_NAME_TYPE == FIRST_TYPE);
6893   GotoIfNot(IsStringInstanceType(key_instance_type), &if_keyisother);
6894 
6895   // |key| is a String. Check if it has a cached array index.
6896   Node* hash = LoadNameHashField(key);
6897   GotoIf(IsClearWord32(hash, Name::kDoesNotContainCachedArrayIndexMask),
6898          &if_hascachedindex);
6899   // No cached array index. If the string knows that it contains an index,
6900   // then it must be an uncacheable index. Handle this case in the runtime.
6901   GotoIf(IsClearWord32(hash, Name::kIsNotArrayIndexMask), if_bailout);
6902   // Check if we have a ThinString.
6903   GotoIf(InstanceTypeEqual(key_instance_type, THIN_STRING_TYPE),
6904          &if_thinstring);
6905   GotoIf(InstanceTypeEqual(key_instance_type, THIN_ONE_BYTE_STRING_TYPE),
6906          &if_thinstring);
6907   // Finally, check if |key| is internalized.
6908   STATIC_ASSERT(kNotInternalizedTag != 0);
6909   GotoIf(IsSetWord32(key_instance_type, kIsNotInternalizedMask),
6910          if_notinternalized != nullptr ? if_notinternalized : if_bailout);
6911   Goto(if_keyisunique);
6912 
6913   BIND(&if_thinstring);
6914   var_unique->Bind(LoadObjectField(key, ThinString::kActualOffset));
6915   Goto(if_keyisunique);
6916 
6917   BIND(&if_hascachedindex);
6918   var_index->Bind(DecodeWordFromWord32<Name::ArrayIndexValueBits>(hash));
6919   Goto(if_keyisindex);
6920 
6921   BIND(&if_keyisother);
6922   GotoIfNot(InstanceTypeEqual(key_instance_type, ODDBALL_TYPE), if_bailout);
6923   var_unique->Bind(LoadObjectField(key, Oddball::kToStringOffset));
6924   Goto(if_keyisunique);
6925 }
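// For illustration, the checks in TryToName amount to the following decision
// ladder for a String key with hash field |h| and instance type |t|:
//   if (!(h & Name::kDoesNotContainCachedArrayIndexMask)) -> cached index;
//   else if (!(h & Name::kIsNotArrayIndexMask)) -> uncacheable index, bail out;
//   else if (t is a thin string type)           -> use the wrapped actual string;
//   else if (t & kIsNotInternalizedMask)        -> not internalized (or bail out);
//   else                                        -> |key| is already a unique name.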
6926 
6927 void CodeStubAssembler::TryInternalizeString(
6928     Node* string, Label* if_index, Variable* var_index, Label* if_internalized,
6929     Variable* var_internalized, Label* if_not_internalized, Label* if_bailout) {
6930   DCHECK(var_index->rep() == MachineType::PointerRepresentation());
6931   DCHECK_EQ(var_internalized->rep(), MachineRepresentation::kTagged);
6932   CSA_SLOW_ASSERT(this, IsString(string));
6933   Node* function =
6934       ExternalConstant(ExternalReference::try_internalize_string_function());
6935   Node* result = CallCFunction1(MachineType::AnyTagged(),
6936                                 MachineType::AnyTagged(), function, string);
6937   Label internalized(this);
6938   GotoIf(TaggedIsNotSmi(result), &internalized);
6939   Node* word_result = SmiUntag(result);
6940   GotoIf(WordEqual(word_result, IntPtrConstant(ResultSentinel::kNotFound)),
6941          if_not_internalized);
6942   GotoIf(WordEqual(word_result, IntPtrConstant(ResultSentinel::kUnsupported)),
6943          if_bailout);
6944   var_index->Bind(word_result);
6945   Goto(if_index);
6946 
6947   BIND(&internalized);
6948   var_internalized->Bind(result);
6949   Goto(if_internalized);
6950 }
6951 
6952 template <typename Dictionary>
6953 TNode<IntPtrT> CodeStubAssembler::EntryToIndex(TNode<IntPtrT> entry,
6954                                                int field_index) {
6955   TNode<IntPtrT> entry_index =
6956       IntPtrMul(entry, IntPtrConstant(Dictionary::kEntrySize));
6957   return IntPtrAdd(entry_index, IntPtrConstant(Dictionary::kElementsStartIndex +
6958                                                field_index));
6959 }
6960 
6961 template TNode<IntPtrT> CodeStubAssembler::EntryToIndex<NameDictionary>(
6962     TNode<IntPtrT>, int);
6963 template TNode<IntPtrT> CodeStubAssembler::EntryToIndex<GlobalDictionary>(
6964     TNode<IntPtrT>, int);
6965 template TNode<IntPtrT> CodeStubAssembler::EntryToIndex<NumberDictionary>(
6966     TNode<IntPtrT>, int);
6967 
6968 // This must be kept in sync with HashTableBase::ComputeCapacity().
6969 TNode<IntPtrT> CodeStubAssembler::HashTableComputeCapacity(
6970     TNode<IntPtrT> at_least_space_for) {
6971   TNode<IntPtrT> capacity = IntPtrRoundUpToPowerOfTwo32(
6972       IntPtrAdd(at_least_space_for, WordShr(at_least_space_for, 1)));
6973   return IntPtrMax(capacity, IntPtrConstant(HashTableBase::kMinCapacity));
6974 }
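// For illustration, the capacity computation above is
//   capacity = max(RoundUpToPowerOfTwo32(n + (n >> 1)), kMinCapacity)
// i.e. reserve roughly 50% slack and round up to a power of two; e.g.
// n == 11 yields 16, while n == 12 yields 32.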
6975 
6976 TNode<IntPtrT> CodeStubAssembler::IntPtrMax(SloppyTNode<IntPtrT> left,
6977                                             SloppyTNode<IntPtrT> right) {
6978   intptr_t left_constant;
6979   intptr_t right_constant;
6980   if (ToIntPtrConstant(left, left_constant) &&
6981       ToIntPtrConstant(right, right_constant)) {
6982     return IntPtrConstant(std::max(left_constant, right_constant));
6983   }
6984   return SelectConstant<IntPtrT>(IntPtrGreaterThanOrEqual(left, right), left,
6985                                  right);
6986 }
6987 
6988 TNode<IntPtrT> CodeStubAssembler::IntPtrMin(SloppyTNode<IntPtrT> left,
6989                                             SloppyTNode<IntPtrT> right) {
6990   intptr_t left_constant;
6991   intptr_t right_constant;
6992   if (ToIntPtrConstant(left, left_constant) &&
6993       ToIntPtrConstant(right, right_constant)) {
6994     return IntPtrConstant(std::min(left_constant, right_constant));
6995   }
6996   return SelectConstant<IntPtrT>(IntPtrLessThanOrEqual(left, right), left,
6997                                  right);
6998 }
6999 
7000 template <>
7001 TNode<HeapObject> CodeStubAssembler::LoadName<NameDictionary>(
7002     TNode<HeapObject> key) {
7003   CSA_ASSERT(this, Word32Or(IsTheHole(key), IsName(key)));
7004   return key;
7005 }
7006 
7007 template <>
7008 TNode<HeapObject> CodeStubAssembler::LoadName<GlobalDictionary>(
7009     TNode<HeapObject> key) {
7010   TNode<PropertyCell> property_cell = CAST(key);
7011   return CAST(LoadObjectField(property_cell, PropertyCell::kNameOffset));
7012 }
7013 
7014 template <typename Dictionary>
7015 void CodeStubAssembler::NameDictionaryLookup(
7016     TNode<Dictionary> dictionary, TNode<Name> unique_name, Label* if_found,
7017     TVariable<IntPtrT>* var_name_index, Label* if_not_found, int inlined_probes,
7018     LookupMode mode) {
7019   static_assert(std::is_same<Dictionary, NameDictionary>::value ||
7020                     std::is_same<Dictionary, GlobalDictionary>::value,
7021                 "Unexpected NameDictionary");
7022   DCHECK_EQ(MachineType::PointerRepresentation(), var_name_index->rep());
7023   DCHECK_IMPLIES(mode == kFindInsertionIndex,
7024                  inlined_probes == 0 && if_found == nullptr);
7025   Comment("NameDictionaryLookup");
7026 
7027   TNode<IntPtrT> capacity = SmiUntag(GetCapacity<Dictionary>(dictionary));
7028   TNode<WordT> mask = IntPtrSub(capacity, IntPtrConstant(1));
7029   TNode<WordT> hash = ChangeUint32ToWord(LoadNameHash(unique_name));
7030 
7031   // See Dictionary::FirstProbe().
7032   TNode<IntPtrT> count = IntPtrConstant(0);
7033   TNode<IntPtrT> entry = Signed(WordAnd(hash, mask));
7034   Node* undefined = UndefinedConstant();
7035 
7036   for (int i = 0; i < inlined_probes; i++) {
7037     TNode<IntPtrT> index = EntryToIndex<Dictionary>(entry);
7038     *var_name_index = index;
7039 
7040     TNode<HeapObject> current = CAST(LoadFixedArrayElement(dictionary, index));
7041     GotoIf(WordEqual(current, undefined), if_not_found);
7042     current = LoadName<Dictionary>(current);
7043     GotoIf(WordEqual(current, unique_name), if_found);
7044 
7045     // See Dictionary::NextProbe().
7046     count = IntPtrConstant(i + 1);
7047     entry = Signed(WordAnd(IntPtrAdd(entry, count), mask));
7048   }
7049   if (mode == kFindInsertionIndex) {
7050     // Appease the variable merging algorithm for "Goto(&loop)" below.
7051     *var_name_index = IntPtrConstant(0);
7052   }
7053 
7054   TVARIABLE(IntPtrT, var_count, count);
7055   TVARIABLE(IntPtrT, var_entry, entry);
7056   Variable* loop_vars[] = {&var_count, &var_entry, var_name_index};
7057   Label loop(this, 3, loop_vars);
7058   Goto(&loop);
7059   BIND(&loop);
7060   {
7061     TNode<IntPtrT> entry = var_entry.value();
7062 
7063     TNode<IntPtrT> index = EntryToIndex<Dictionary>(entry);
7064     *var_name_index = index;
7065 
7066     TNode<HeapObject> current = CAST(LoadFixedArrayElement(dictionary, index));
7067     GotoIf(WordEqual(current, undefined), if_not_found);
7068     if (mode == kFindExisting) {
7069       current = LoadName<Dictionary>(current);
7070       GotoIf(WordEqual(current, unique_name), if_found);
7071     } else {
7072       DCHECK_EQ(kFindInsertionIndex, mode);
7073       GotoIf(WordEqual(current, TheHoleConstant()), if_not_found);
7074     }
7075 
7076     // See Dictionary::NextProbe().
7077     Increment(&var_count);
7078     entry = Signed(WordAnd(IntPtrAdd(entry, var_count.value()), mask));
7079 
7080     var_entry = entry;
7081     Goto(&loop);
7082   }
7083 }
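// For illustration, both the unrolled probes and the loop above follow the
// same probe sequence as Dictionary::FirstProbe()/NextProbe():
//   entry(0) = hash & mask;                // mask == capacity - 1
//   entry(i) = (entry(i - 1) + i) & mask;  // i = 1, 2, 3, ...
// The probe offsets are triangular numbers, so for a power-of-two capacity
// every slot is visited exactly once before the sequence repeats.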
7084 
7085 // Instantiate template methods to workaround GCC compilation issue.
7086 template void CodeStubAssembler::NameDictionaryLookup<NameDictionary>(
7087     TNode<NameDictionary>, TNode<Name>, Label*, TVariable<IntPtrT>*, Label*,
7088     int, LookupMode);
7089 template void CodeStubAssembler::NameDictionaryLookup<GlobalDictionary>(
7090     TNode<GlobalDictionary>, TNode<Name>, Label*, TVariable<IntPtrT>*, Label*,
7091     int, LookupMode);
7092 
7093 Node* CodeStubAssembler::ComputeUnseededHash(Node* key) {
7094   // See v8::internal::ComputeUnseededHash()
7095   Node* hash = TruncateIntPtrToInt32(key);
7096   hash = Int32Add(Word32Xor(hash, Int32Constant(0xFFFFFFFF)),
7097                   Word32Shl(hash, Int32Constant(15)));
7098   hash = Word32Xor(hash, Word32Shr(hash, Int32Constant(12)));
7099   hash = Int32Add(hash, Word32Shl(hash, Int32Constant(2)));
7100   hash = Word32Xor(hash, Word32Shr(hash, Int32Constant(4)));
7101   hash = Int32Mul(hash, Int32Constant(2057));
7102   hash = Word32Xor(hash, Word32Shr(hash, Int32Constant(16)));
7103   return Word32And(hash, Int32Constant(0x3FFFFFFF));
7104 }
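// For illustration, the node graph above computes the same hash as the C++
// helper it references:
//   uint32_t hash = static_cast<uint32_t>(key);
//   hash = ~hash + (hash << 15);
//   hash = hash ^ (hash >> 12);
//   hash = hash + (hash << 2);
//   hash = hash ^ (hash >> 4);
//   hash = hash * 2057;
//   hash = hash ^ (hash >> 16);
//   return hash & 0x3FFFFFFF;  // keep the hash in the positive Smi range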
7105 
7106 Node* CodeStubAssembler::ComputeSeededHash(Node* key) {
7107   Node* const function_addr =
7108       ExternalConstant(ExternalReference::compute_integer_hash());
7109   Node* const isolate_ptr =
7110       ExternalConstant(ExternalReference::isolate_address(isolate()));
7111 
7112   MachineType type_ptr = MachineType::Pointer();
7113   MachineType type_uint32 = MachineType::Uint32();
7114 
7115   Node* const result =
7116       CallCFunction2(type_uint32, type_ptr, type_uint32, function_addr,
7117                      isolate_ptr, TruncateIntPtrToInt32(key));
7118   return result;
7119 }
7120 
7121 void CodeStubAssembler::NumberDictionaryLookup(
7122     TNode<NumberDictionary> dictionary, TNode<IntPtrT> intptr_index,
7123     Label* if_found, TVariable<IntPtrT>* var_entry, Label* if_not_found) {
7124   CSA_ASSERT(this, IsNumberDictionary(dictionary));
7125   DCHECK_EQ(MachineType::PointerRepresentation(), var_entry->rep());
7126   Comment("NumberDictionaryLookup");
7127 
7128   TNode<IntPtrT> capacity = SmiUntag(GetCapacity<NumberDictionary>(dictionary));
7129   TNode<WordT> mask = IntPtrSub(capacity, IntPtrConstant(1));
7130 
7131   TNode<WordT> hash = ChangeUint32ToWord(ComputeSeededHash(intptr_index));
7132   Node* key_as_float64 = RoundIntPtrToFloat64(intptr_index);
7133 
7134   // See Dictionary::FirstProbe().
7135   TNode<IntPtrT> count = IntPtrConstant(0);
7136   TNode<IntPtrT> entry = Signed(WordAnd(hash, mask));
7137 
7138   Node* undefined = UndefinedConstant();
7139   Node* the_hole = TheHoleConstant();
7140 
7141   TVARIABLE(IntPtrT, var_count, count);
7142   Variable* loop_vars[] = {&var_count, var_entry};
7143   Label loop(this, 2, loop_vars);
7144   *var_entry = entry;
7145   Goto(&loop);
7146   BIND(&loop);
7147   {
7148     TNode<IntPtrT> entry = var_entry->value();
7149 
7150     TNode<IntPtrT> index = EntryToIndex<NumberDictionary>(entry);
7151     Node* current = LoadFixedArrayElement(dictionary, index);
7152     GotoIf(WordEqual(current, undefined), if_not_found);
7153     Label next_probe(this);
7154     {
7155       Label if_currentissmi(this), if_currentisnotsmi(this);
7156       Branch(TaggedIsSmi(current), &if_currentissmi, &if_currentisnotsmi);
7157       BIND(&if_currentissmi);
7158       {
7159         Node* current_value = SmiUntag(current);
7160         Branch(WordEqual(current_value, intptr_index), if_found, &next_probe);
7161       }
7162       BIND(&if_currentisnotsmi);
7163       {
7164         GotoIf(WordEqual(current, the_hole), &next_probe);
7165         // Current must be a HeapNumber key.
7166         Node* current_value = LoadHeapNumberValue(current);
7167         Branch(Float64Equal(current_value, key_as_float64), if_found,
7168                &next_probe);
7169       }
7170     }
7171 
7172     BIND(&next_probe);
7173     // See Dictionary::NextProbe().
7174     Increment(&var_count);
7175     entry = Signed(WordAnd(IntPtrAdd(entry, var_count.value()), mask));
7176 
7177     *var_entry = entry;
7178     Goto(&loop);
7179   }
7180 }
7181 
7182 template <class Dictionary>
7183 void CodeStubAssembler::FindInsertionEntry(TNode<Dictionary> dictionary,
7184                                            TNode<Name> key,
7185                                            TVariable<IntPtrT>* var_key_index) {
7186   UNREACHABLE();
7187 }
7188 
7189 template <>
7190 void CodeStubAssembler::FindInsertionEntry<NameDictionary>(
7191     TNode<NameDictionary> dictionary, TNode<Name> key,
7192     TVariable<IntPtrT>* var_key_index) {
7193   Label done(this);
7194   NameDictionaryLookup<NameDictionary>(dictionary, key, nullptr, var_key_index,
7195                                        &done, 0, kFindInsertionIndex);
7196   BIND(&done);
7197 }
7198 
7199 template <class Dictionary>
7200 void CodeStubAssembler::InsertEntry(TNode<Dictionary> dictionary,
7201                                     TNode<Name> key, TNode<Object> value,
7202                                     TNode<IntPtrT> index,
7203                                     TNode<Smi> enum_index) {
7204   UNREACHABLE();  // Use specializations instead.
7205 }
7206 
7207 template <>
7208 void CodeStubAssembler::InsertEntry<NameDictionary>(
7209     TNode<NameDictionary> dictionary, TNode<Name> name, TNode<Object> value,
7210     TNode<IntPtrT> index, TNode<Smi> enum_index) {
7211   // Store name and value.
7212   StoreFixedArrayElement(dictionary, index, name);
7213   StoreValueByKeyIndex<NameDictionary>(dictionary, index, value);
7214 
7215   // Prepare details of the new property.
7216   PropertyDetails d(kData, NONE, PropertyCellType::kNoCell);
7217   enum_index =
7218       SmiShl(enum_index, PropertyDetails::DictionaryStorageField::kShift);
7219   // We OR over the actual index below, so we expect the initial value to be 0.
7220   DCHECK_EQ(0, d.dictionary_index());
7221   TVARIABLE(Smi, var_details, SmiOr(SmiConstant(d.AsSmi()), enum_index));
7222 
7223   // Private names must be marked non-enumerable.
7224   Label not_private(this, &var_details);
7225   GotoIfNot(IsPrivateSymbol(name), &not_private);
7226   TNode<Smi> dont_enum =
7227       SmiShl(SmiConstant(DONT_ENUM), PropertyDetails::AttributesField::kShift);
7228   var_details = SmiOr(var_details.value(), dont_enum);
7229   Goto(&not_private);
7230   BIND(&not_private);
7231 
7232   // Finally, store the details.
7233   StoreDetailsByKeyIndex<NameDictionary>(dictionary, index,
7234                                          var_details.value());
7235 }
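// For illustration, the details Smi stored by the specialization above packs
//   PropertyDetails(kData, NONE, PropertyCellType::kNoCell).AsSmi()
//     | (enum_index << PropertyDetails::DictionaryStorageField::kShift)
//     | (IsPrivateSymbol(name) ? DONT_ENUM << AttributesField::kShift : 0)
// into a single tagged word.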
7236 
7237 template <>
7238 void CodeStubAssembler::InsertEntry<GlobalDictionary>(
7239     TNode<GlobalDictionary> dictionary, TNode<Name> key, TNode<Object> value,
7240     TNode<IntPtrT> index, TNode<Smi> enum_index) {
7241   UNIMPLEMENTED();
7242 }
7243 
7244 template <class Dictionary>
7245 void CodeStubAssembler::Add(TNode<Dictionary> dictionary, TNode<Name> key,
7246                             TNode<Object> value, Label* bailout) {
7247   CSA_ASSERT(this, Word32BinaryNot(IsEmptyPropertyDictionary(dictionary)));
7248   TNode<Smi> capacity = GetCapacity<Dictionary>(dictionary);
7249   TNode<Smi> nof = GetNumberOfElements<Dictionary>(dictionary);
7250   TNode<Smi> new_nof = SmiAdd(nof, SmiConstant(1));
7251   // Require 33% to still be free after adding the new element.
7252   // Computing "x + (x >> 1)" on a Smi x does not return a valid Smi!
7253   // But that's OK here because it's only used for a comparison.
7254   TNode<Smi> required_capacity_pseudo_smi = SmiAdd(new_nof, SmiShr(new_nof, 1));
7255   GotoIf(SmiBelow(capacity, required_capacity_pseudo_smi), bailout);
7256   // Require rehashing if more than 50% of free elements are deleted elements.
7257   TNode<Smi> deleted = GetNumberOfDeletedElements<Dictionary>(dictionary);
7258   CSA_ASSERT(this, SmiAbove(capacity, new_nof));
7259   TNode<Smi> half_of_free_elements = SmiShr(SmiSub(capacity, new_nof), 1);
7260   GotoIf(SmiAbove(deleted, half_of_free_elements), bailout);
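  // For illustration, with new_nof == nof + 1 the two bailout conditions
  // above are:
  //   capacity < new_nof + (new_nof >> 1)    // less than ~33% would stay free
  //   deleted  > (capacity - new_nof) >> 1   // >50% of the free slots are holes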
7261 
7262   TNode<Smi> enum_index = GetNextEnumerationIndex<Dictionary>(dictionary);
7263   TNode<Smi> new_enum_index = SmiAdd(enum_index, SmiConstant(1));
7264   TNode<Smi> max_enum_index =
7265       SmiConstant(PropertyDetails::DictionaryStorageField::kMax);
7266   GotoIf(SmiAbove(new_enum_index, max_enum_index), bailout);
7267 
7268   // No more bailouts after this point.
7269   // Operations from here on can have side effects.
7270 
7271   SetNextEnumerationIndex<Dictionary>(dictionary, new_enum_index);
7272   SetNumberOfElements<Dictionary>(dictionary, new_nof);
7273 
7274   TVARIABLE(IntPtrT, var_key_index);
7275   FindInsertionEntry<Dictionary>(dictionary, key, &var_key_index);
7276   InsertEntry<Dictionary>(dictionary, key, value, var_key_index.value(),
7277                           enum_index);
7278 }
7279 
7280 template void CodeStubAssembler::Add<NameDictionary>(TNode<NameDictionary>,
7281                                                      TNode<Name>, TNode<Object>,
7282                                                      Label*);
7283 
7284 template <typename Array>
7285 void CodeStubAssembler::LookupLinear(TNode<Name> unique_name,
7286                                      TNode<Array> array,
7287                                      TNode<Uint32T> number_of_valid_entries,
7288                                      Label* if_found,
7289                                      TVariable<IntPtrT>* var_name_index,
7290                                      Label* if_not_found) {
7291   static_assert(std::is_base_of<FixedArray, Array>::value ||
7292                     std::is_base_of<TransitionArray, Array>::value,
7293                 "T must be a descendant of FixedArray or a TransitionArray");
7294   Comment("LookupLinear");
7295   TNode<IntPtrT> first_inclusive = IntPtrConstant(Array::ToKeyIndex(0));
7296   TNode<IntPtrT> factor = IntPtrConstant(Array::kEntrySize);
7297   TNode<IntPtrT> last_exclusive = IntPtrAdd(
7298       first_inclusive,
7299       IntPtrMul(ChangeInt32ToIntPtr(number_of_valid_entries), factor));
7300 
7301   BuildFastLoop(last_exclusive, first_inclusive,
7302                 [=](SloppyTNode<IntPtrT> name_index) {
7303                   TNode<MaybeObject> element =
7304                       LoadArrayElement(array, Array::kHeaderSize, name_index);
7305                   CSA_ASSERT(this, IsStrongHeapObject(element));
7306                   TNode<Name> candidate_name =
7307                       CAST(ToStrongHeapObject(element));
7308                   *var_name_index = name_index;
7309                   GotoIf(WordEqual(candidate_name, unique_name), if_found);
7310                 },
7311                 -Array::kEntrySize, INTPTR_PARAMETERS, IndexAdvanceMode::kPre);
7312   Goto(if_not_found);
7313 }
7314 
7315 template <>
7316 TNode<Uint32T> CodeStubAssembler::NumberOfEntries<DescriptorArray>(
7317     TNode<DescriptorArray> descriptors) {
7318   return Unsigned(LoadAndUntagToWord32FixedArrayElement(
7319       descriptors, IntPtrConstant(DescriptorArray::kDescriptorLengthIndex)));
7320 }
7321 
7322 template <>
7323 TNode<Uint32T> CodeStubAssembler::NumberOfEntries<TransitionArray>(
7324     TNode<TransitionArray> transitions) {
7325   TNode<IntPtrT> length = LoadAndUntagWeakFixedArrayLength(transitions);
7326   return Select<Uint32T>(
7327       UintPtrLessThan(length, IntPtrConstant(TransitionArray::kFirstIndex)),
7328       [=] { return Unsigned(Int32Constant(0)); },
7329       [=] {
7330         return Unsigned(LoadAndUntagToWord32ArrayElement(
7331             transitions, WeakFixedArray::kHeaderSize,
7332             IntPtrConstant(TransitionArray::kTransitionLengthIndex)));
7333       });
7334 }
7335 
7336 template <typename Array>
7337 TNode<IntPtrT> CodeStubAssembler::EntryIndexToIndex(
7338     TNode<Uint32T> entry_index) {
7339   TNode<Int32T> entry_size = Int32Constant(Array::kEntrySize);
7340   TNode<Word32T> index = Int32Mul(entry_index, entry_size);
7341   return ChangeInt32ToIntPtr(index);
7342 }
7343 
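// Illustrative layout math (assuming DescriptorArray::kEntrySize == 3):
// EntryIndexToIndex() maps entry i to the slot distance 3 * i from entry 0,
// and ToKeyIndex() adds the slot index of entry 0's key to obtain an
// absolute key slot index.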
7344 template <typename Array>
7345 TNode<IntPtrT> CodeStubAssembler::ToKeyIndex(TNode<Uint32T> entry_index) {
7346   return IntPtrAdd(IntPtrConstant(Array::ToKeyIndex(0)),
7347                    EntryIndexToIndex<Array>(entry_index));
7348 }
7349 
7350 template TNode<IntPtrT> CodeStubAssembler::ToKeyIndex<DescriptorArray>(
7351     TNode<Uint32T>);
7352 template TNode<IntPtrT> CodeStubAssembler::ToKeyIndex<TransitionArray>(
7353     TNode<Uint32T>);
7354 
7355 template <>
7356 TNode<Uint32T> CodeStubAssembler::GetSortedKeyIndex<DescriptorArray>(
7357     TNode<DescriptorArray> descriptors, TNode<Uint32T> descriptor_number) {
7358   TNode<Uint32T> details =
7359       DescriptorArrayGetDetails(descriptors, descriptor_number);
7360   return DecodeWord32<PropertyDetails::DescriptorPointer>(details);
7361 }
7362 
7363 template <>
7364 TNode<Uint32T> CodeStubAssembler::GetSortedKeyIndex<TransitionArray>(
7365     TNode<TransitionArray> transitions, TNode<Uint32T> transition_number) {
7366   return transition_number;
7367 }
7368 
7369 template <typename Array>
7370 TNode<Name> CodeStubAssembler::GetKey(TNode<Array> array,
7371                                       TNode<Uint32T> entry_index) {
7372   static_assert(std::is_base_of<FixedArray, Array>::value ||
7373                     std::is_base_of<TransitionArray, Array>::value,
7374                 "T must be a descendant of FixedArray or a TransitionArray");
7375   const int key_offset = Array::ToKeyIndex(0) * kPointerSize;
7376   TNode<MaybeObject> element =
7377       LoadArrayElement(array, Array::kHeaderSize,
7378                        EntryIndexToIndex<Array>(entry_index), key_offset);
7379   CSA_ASSERT(this, IsStrongHeapObject(element));
7380   return CAST(ToStrongHeapObject(element));
7381 }
7382 
7383 template TNode<Name> CodeStubAssembler::GetKey<DescriptorArray>(
7384     TNode<DescriptorArray>, TNode<Uint32T>);
7385 template TNode<Name> CodeStubAssembler::GetKey<TransitionArray>(
7386     TNode<TransitionArray>, TNode<Uint32T>);
7387 
7388 TNode<Uint32T> CodeStubAssembler::DescriptorArrayGetDetails(
7389     TNode<DescriptorArray> descriptors, TNode<Uint32T> descriptor_number) {
7390   const int details_offset = DescriptorArray::ToDetailsIndex(0) * kPointerSize;
7391   return Unsigned(LoadAndUntagToWord32FixedArrayElement(
7392       descriptors, EntryIndexToIndex<DescriptorArray>(descriptor_number),
7393       details_offset));
7394 }
7395 
7396 template <typename Array>
7397 void CodeStubAssembler::LookupBinary(TNode<Name> unique_name,
7398                                      TNode<Array> array,
7399                                      TNode<Uint32T> number_of_valid_entries,
7400                                      Label* if_found,
7401                                      TVariable<IntPtrT>* var_name_index,
7402                                      Label* if_not_found) {
7403   Comment("LookupBinary");
7404   TVARIABLE(Uint32T, var_low, Unsigned(Int32Constant(0)));
7405   TNode<Uint32T> limit =
7406       Unsigned(Int32Sub(NumberOfEntries<Array>(array), Int32Constant(1)));
7407   TVARIABLE(Uint32T, var_high, limit);
7408   TNode<Uint32T> hash = LoadNameHashField(unique_name);
7409   CSA_ASSERT(this, Word32NotEqual(hash, Int32Constant(0)));
7410 
7411   // Assume non-empty array.
7412   CSA_ASSERT(this, Uint32LessThanOrEqual(var_low.value(), var_high.value()));
7413 
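  // Sketch of the two phases below: entries are sorted by hash when visited
  // via GetSortedKeyIndex(), so the binary loop narrows [low, high] down to
  // the first sorted position whose hash is >= {hash}, and the scan loop then
  // walks forward over the run of equal-hash entries comparing names.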
7414   Label binary_loop(this, {&var_high, &var_low});
7415   Goto(&binary_loop);
7416   BIND(&binary_loop);
7417   {
7418     // mid = low + (high - low) / 2 (to avoid overflow in "(low + high) / 2").
7419     TNode<Uint32T> mid = Unsigned(
7420         Int32Add(var_low.value(),
7421                  Word32Shr(Int32Sub(var_high.value(), var_low.value()), 1)));
7422     // mid_name = array->GetSortedKey(mid).
7423     TNode<Uint32T> sorted_key_index = GetSortedKeyIndex<Array>(array, mid);
7424     TNode<Name> mid_name = GetKey<Array>(array, sorted_key_index);
7425 
7426     TNode<Uint32T> mid_hash = LoadNameHashField(mid_name);
7427 
7428     Label mid_greater(this), mid_less(this), merge(this);
7429     Branch(Uint32GreaterThanOrEqual(mid_hash, hash), &mid_greater, &mid_less);
7430     BIND(&mid_greater);
7431     {
7432       var_high = mid;
7433       Goto(&merge);
7434     }
7435     BIND(&mid_less);
7436     {
7437       var_low = Unsigned(Int32Add(mid, Int32Constant(1)));
7438       Goto(&merge);
7439     }
7440     BIND(&merge);
7441     GotoIf(Word32NotEqual(var_low.value(), var_high.value()), &binary_loop);
7442   }
7443 
7444   Label scan_loop(this, &var_low);
7445   Goto(&scan_loop);
7446   BIND(&scan_loop);
7447   {
7448     GotoIf(Int32GreaterThan(var_low.value(), limit), if_not_found);
7449 
7450     TNode<Uint32T> sort_index =
7451         GetSortedKeyIndex<Array>(array, var_low.value());
7452     TNode<Name> current_name = GetKey<Array>(array, sort_index);
7453     TNode<Uint32T> current_hash = LoadNameHashField(current_name);
7454     GotoIf(Word32NotEqual(current_hash, hash), if_not_found);
7455     Label next(this);
7456     GotoIf(WordNotEqual(current_name, unique_name), &next);
7457     GotoIf(Uint32GreaterThanOrEqual(sort_index, number_of_valid_entries),
7458            if_not_found);
7459     *var_name_index = ToKeyIndex<Array>(sort_index);
7460     Goto(if_found);
7461 
7462     BIND(&next);
7463     var_low = Unsigned(Int32Add(var_low.value(), Int32Constant(1)));
7464     Goto(&scan_loop);
7465   }
7466 }
7467 
7468 void CodeStubAssembler::DescriptorArrayForEach(
7469     VariableList& variable_list, TNode<Uint32T> start_descriptor,
7470     TNode<Uint32T> end_descriptor, const ForEachDescriptorBodyFunction& body) {
7471   TNode<IntPtrT> start_index =
7472       IntPtrAdd(IntPtrConstant(DescriptorArray::ToKeyIndex(0)),
7473                 EntryIndexToIndex<DescriptorArray>(start_descriptor));
7474 
7475   TNode<IntPtrT> end_index =
7476       IntPtrAdd(IntPtrConstant(DescriptorArray::ToKeyIndex(0)),
7477                 EntryIndexToIndex<DescriptorArray>(end_descriptor));
7478 
7479   BuildFastLoop(variable_list, start_index, end_index,
7480                 [=](Node* index) {
7481                   TNode<UintPtrT> descriptor_key_index =
7482                       TNode<UintPtrT>::UncheckedCast(index);
7483                   body(descriptor_key_index);
7484                 },
7485                 DescriptorArray::kEntrySize, INTPTR_PARAMETERS,
7486                 IndexAdvanceMode::kPost);
7487 }
7488 
7489 void CodeStubAssembler::DescriptorLookup(
7490     SloppyTNode<Name> unique_name, SloppyTNode<DescriptorArray> descriptors,
7491     SloppyTNode<Uint32T> bitfield3, Label* if_found,
7492     TVariable<IntPtrT>* var_name_index, Label* if_not_found) {
7493   Comment("DescriptorArrayLookup");
7494   TNode<Uint32T> nof = DecodeWord32<Map::NumberOfOwnDescriptorsBits>(bitfield3);
7495   Lookup<DescriptorArray>(unique_name, descriptors, nof, if_found,
7496                           var_name_index, if_not_found);
7497 }
7498 
7499 void CodeStubAssembler::TransitionLookup(
7500     SloppyTNode<Name> unique_name, SloppyTNode<TransitionArray> transitions,
7501     Label* if_found, TVariable<IntPtrT>* var_name_index, Label* if_not_found) {
7502   Comment("TransitionArrayLookup");
7503   TNode<Uint32T> number_of_valid_transitions =
7504       NumberOfEntries<TransitionArray>(transitions);
7505   Lookup<TransitionArray>(unique_name, transitions, number_of_valid_transitions,
7506                           if_found, var_name_index, if_not_found);
7507 }
7508 
7509 template <typename Array>
7510 void CodeStubAssembler::Lookup(TNode<Name> unique_name, TNode<Array> array,
7511                                TNode<Uint32T> number_of_valid_entries,
7512                                Label* if_found,
7513                                TVariable<IntPtrT>* var_name_index,
7514                                Label* if_not_found) {
7515   Comment("ArrayLookup");
7516   if (!number_of_valid_entries) {
7517     number_of_valid_entries = NumberOfEntries(array);
7518   }
7519   GotoIf(Word32Equal(number_of_valid_entries, Int32Constant(0)), if_not_found);
7520   Label linear_search(this), binary_search(this);
7521   const int kMaxElementsForLinearSearch = 32;
7522   Branch(Uint32LessThanOrEqual(number_of_valid_entries,
7523                                Int32Constant(kMaxElementsForLinearSearch)),
7524          &linear_search, &binary_search);
7525   BIND(&linear_search);
7526   {
7527     LookupLinear<Array>(unique_name, array, number_of_valid_entries, if_found,
7528                         var_name_index, if_not_found);
7529   }
7530   BIND(&binary_search);
7531   {
7532     LookupBinary<Array>(unique_name, array, number_of_valid_entries, if_found,
7533                         var_name_index, if_not_found);
7534   }
7535 }
7536 
7537 TNode<BoolT> CodeStubAssembler::IsSimpleObjectMap(TNode<Map> map) {
7538   uint32_t mask =
7539       Map::HasNamedInterceptorBit::kMask | Map::IsAccessCheckNeededBit::kMask;
7540   // !IsSpecialReceiverType && !IsNamedInterceptor && !IsAccessCheckNeeded
7541   return Select<BoolT>(
7542       IsSpecialReceiverInstanceType(LoadMapInstanceType(map)),
7543       [=] { return Int32FalseConstant(); },
7544       [=] { return IsClearWord32(LoadMapBitField(map), mask); });
7545 }
7546 
7547 void CodeStubAssembler::TryLookupPropertyInSimpleObject(
7548     TNode<JSObject> object, TNode<Map> map, TNode<Name> unique_name,
7549     Label* if_found_fast, Label* if_found_dict,
7550     TVariable<HeapObject>* var_meta_storage, TVariable<IntPtrT>* var_name_index,
7551     Label* if_not_found) {
7552   CSA_ASSERT(this, IsSimpleObjectMap(map));
7553 
7554   TNode<Uint32T> bit_field3 = LoadMapBitField3(map);
7555   Label if_isfastmap(this), if_isslowmap(this);
7556   Branch(IsSetWord32<Map::IsDictionaryMapBit>(bit_field3), &if_isslowmap,
7557          &if_isfastmap);
7558   BIND(&if_isfastmap);
7559   {
7560     TNode<DescriptorArray> descriptors = LoadMapDescriptors(map);
7561     *var_meta_storage = descriptors;
7562 
7563     DescriptorLookup(unique_name, descriptors, bit_field3, if_found_fast,
7564                      var_name_index, if_not_found);
7565   }
7566   BIND(&if_isslowmap);
7567   {
7568     TNode<NameDictionary> dictionary = CAST(LoadSlowProperties(object));
7569     *var_meta_storage = dictionary;
7570 
7571     NameDictionaryLookup<NameDictionary>(dictionary, unique_name, if_found_dict,
7572                                          var_name_index, if_not_found);
7573   }
7574 }
7575 
7576 void CodeStubAssembler::TryLookupProperty(
7577     SloppyTNode<JSObject> object, SloppyTNode<Map> map,
7578     SloppyTNode<Int32T> instance_type, SloppyTNode<Name> unique_name,
7579     Label* if_found_fast, Label* if_found_dict, Label* if_found_global,
7580     TVariable<HeapObject>* var_meta_storage, TVariable<IntPtrT>* var_name_index,
7581     Label* if_not_found, Label* if_bailout) {
7582   Label if_objectisspecial(this);
7583   GotoIf(IsSpecialReceiverInstanceType(instance_type), &if_objectisspecial);
7584 
7585   TryLookupPropertyInSimpleObject(object, map, unique_name, if_found_fast,
7586                                   if_found_dict, var_meta_storage,
7587                                   var_name_index, if_not_found);
7588 
7589   BIND(&if_objectisspecial);
7590   {
7591     // Handle the global object here and bail out for other special objects.
7592     GotoIfNot(InstanceTypeEqual(instance_type, JS_GLOBAL_OBJECT_TYPE),
7593               if_bailout);
7594 
7595     // Handle interceptors and access checks in runtime.
7596     TNode<Int32T> bit_field = LoadMapBitField(map);
7597     int mask =
7598         Map::HasNamedInterceptorBit::kMask | Map::IsAccessCheckNeededBit::kMask;
7599     GotoIf(IsSetWord32(bit_field, mask), if_bailout);
7600 
7601     TNode<GlobalDictionary> dictionary = CAST(LoadSlowProperties(object));
7602     *var_meta_storage = dictionary;
7603 
7604     NameDictionaryLookup<GlobalDictionary>(
7605         dictionary, unique_name, if_found_global, var_name_index, if_not_found);
7606   }
7607 }
7608 
7609 void CodeStubAssembler::TryHasOwnProperty(Node* object, Node* map,
7610                                           Node* instance_type,
7611                                           Node* unique_name, Label* if_found,
7612                                           Label* if_not_found,
7613                                           Label* if_bailout) {
7614   Comment("TryHasOwnProperty");
7615   TVARIABLE(HeapObject, var_meta_storage);
7616   TVARIABLE(IntPtrT, var_name_index);
7617 
7618   Label if_found_global(this);
7619   TryLookupProperty(object, map, instance_type, unique_name, if_found, if_found,
7620                     &if_found_global, &var_meta_storage, &var_name_index,
7621                     if_not_found, if_bailout);
7622 
7623   BIND(&if_found_global);
7624   {
7625     VARIABLE(var_value, MachineRepresentation::kTagged);
7626     VARIABLE(var_details, MachineRepresentation::kWord32);
7627     // Check if the property cell is not deleted.
7628     LoadPropertyFromGlobalDictionary(var_meta_storage.value(),
7629                                      var_name_index.value(), &var_value,
7630                                      &var_details, if_not_found);
7631     Goto(if_found);
7632   }
7633 }
7634 
7635 Node* CodeStubAssembler::GetMethod(Node* context, Node* object,
7636                                    Handle<Name> name,
7637                                    Label* if_null_or_undefined) {
7638   Node* method = GetProperty(context, object, name);
7639 
7640   GotoIf(IsUndefined(method), if_null_or_undefined);
7641   GotoIf(IsNull(method), if_null_or_undefined);
7642 
7643   return method;
7644 }
7645 
7646 void CodeStubAssembler::LoadPropertyFromFastObject(Node* object, Node* map,
7647                                                    Node* descriptors,
7648                                                    Node* name_index,
7649                                                    Variable* var_details,
7650                                                    Variable* var_value) {
7651   DCHECK_EQ(MachineRepresentation::kWord32, var_details->rep());
7652   DCHECK_EQ(MachineRepresentation::kTagged, var_value->rep());
7653 
7654   Node* details =
7655       LoadDetailsByKeyIndex<DescriptorArray>(descriptors, name_index);
7656   var_details->Bind(details);
7657 
7658   LoadPropertyFromFastObject(object, map, descriptors, name_index, details,
7659                              var_value);
7660 }
7661 
7662 void CodeStubAssembler::LoadPropertyFromFastObject(Node* object, Node* map,
7663                                                    Node* descriptors,
7664                                                    Node* name_index,
7665                                                    Node* details,
7666                                                    Variable* var_value) {
7667   Comment("[ LoadPropertyFromFastObject");
7668 
7669   Node* location = DecodeWord32<PropertyDetails::LocationField>(details);
7670 
7671   Label if_in_field(this), if_in_descriptor(this), done(this);
7672   Branch(Word32Equal(location, Int32Constant(kField)), &if_in_field,
7673          &if_in_descriptor);
7674   BIND(&if_in_field);
7675   {
7676     Node* field_index =
7677         DecodeWordFromWord32<PropertyDetails::FieldIndexField>(details);
7678     Node* representation =
7679         DecodeWord32<PropertyDetails::RepresentationField>(details);
7680 
7681     field_index =
7682         IntPtrAdd(field_index, LoadMapInobjectPropertiesStartInWords(map));
7683     Node* instance_size_in_words = LoadMapInstanceSizeInWords(map);
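    // {field_index} is now an absolute word index into the object: indices
    // below {instance_size_in_words} are in-object fields, larger indices are
    // rebased below and read out of the out-of-object property backing store.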
7684 
7685     Label if_inobject(this), if_backing_store(this);
7686     VARIABLE(var_double_value, MachineRepresentation::kFloat64);
7687     Label rebox_double(this, &var_double_value);
7688     Branch(UintPtrLessThan(field_index, instance_size_in_words), &if_inobject,
7689            &if_backing_store);
7690     BIND(&if_inobject);
7691     {
7692       Comment("if_inobject");
7693       Node* field_offset = TimesPointerSize(field_index);
7694 
7695       Label if_double(this), if_tagged(this);
7696       Branch(Word32NotEqual(representation,
7697                             Int32Constant(Representation::kDouble)),
7698              &if_tagged, &if_double);
7699       BIND(&if_tagged);
7700       {
7701         var_value->Bind(LoadObjectField(object, field_offset));
7702         Goto(&done);
7703       }
7704       BIND(&if_double);
7705       {
7706         if (FLAG_unbox_double_fields) {
7707           var_double_value.Bind(
7708               LoadObjectField(object, field_offset, MachineType::Float64()));
7709         } else {
7710           Node* mutable_heap_number = LoadObjectField(object, field_offset);
7711           var_double_value.Bind(LoadHeapNumberValue(mutable_heap_number));
7712         }
7713         Goto(&rebox_double);
7714       }
7715     }
7716     BIND(&if_backing_store);
7717     {
7718       Comment("if_backing_store");
7719       Node* properties = LoadFastProperties(object);
7720       field_index = IntPtrSub(field_index, instance_size_in_words);
7721       Node* value = LoadPropertyArrayElement(properties, field_index);
7722 
7723       Label if_double(this), if_tagged(this);
7724       Branch(Word32NotEqual(representation,
7725                             Int32Constant(Representation::kDouble)),
7726              &if_tagged, &if_double);
7727       BIND(&if_tagged);
7728       {
7729         var_value->Bind(value);
7730         Goto(&done);
7731       }
7732       BIND(&if_double);
7733       {
7734         var_double_value.Bind(LoadHeapNumberValue(value));
7735         Goto(&rebox_double);
7736       }
7737     }
7738     BIND(&rebox_double);
7739     {
7740       Comment("rebox_double");
7741       Node* heap_number = AllocateHeapNumberWithValue(var_double_value.value());
7742       var_value->Bind(heap_number);
7743       Goto(&done);
7744     }
7745   }
7746   BIND(&if_in_descriptor);
7747   {
7748     var_value->Bind(
7749         LoadValueByKeyIndex<DescriptorArray>(descriptors, name_index));
7750     Goto(&done);
7751   }
7752   BIND(&done);
7753 
7754   Comment("] LoadPropertyFromFastObject");
7755 }
7756 
7757 void CodeStubAssembler::LoadPropertyFromNameDictionary(Node* dictionary,
7758                                                        Node* name_index,
7759                                                        Variable* var_details,
7760                                                        Variable* var_value) {
7761   Comment("LoadPropertyFromNameDictionary");
7762   CSA_ASSERT(this, IsDictionary(dictionary));
7763 
7764   var_details->Bind(
7765       LoadDetailsByKeyIndex<NameDictionary>(dictionary, name_index));
7766   var_value->Bind(LoadValueByKeyIndex<NameDictionary>(dictionary, name_index));
7767 
7768   Comment("] LoadPropertyFromNameDictionary");
7769 }
7770 
7771 void CodeStubAssembler::LoadPropertyFromGlobalDictionary(Node* dictionary,
7772                                                          Node* name_index,
7773                                                          Variable* var_details,
7774                                                          Variable* var_value,
7775                                                          Label* if_deleted) {
7776   Comment("[ LoadPropertyFromGlobalDictionary");
7777   CSA_ASSERT(this, IsDictionary(dictionary));
7778 
7779   Node* property_cell = LoadFixedArrayElement(dictionary, name_index);
7780   CSA_ASSERT(this, IsPropertyCell(property_cell));
7781 
7782   Node* value = LoadObjectField(property_cell, PropertyCell::kValueOffset);
7783   GotoIf(WordEqual(value, TheHoleConstant()), if_deleted);
7784 
7785   var_value->Bind(value);
7786 
7787   Node* details = LoadAndUntagToWord32ObjectField(property_cell,
7788                                                   PropertyCell::kDetailsOffset);
7789   var_details->Bind(details);
7790 
7791   Comment("] LoadPropertyFromGlobalDictionary");
7792 }
7793 
7794 // |value| is the property backing store's contents, which is either a value
7795 // or an accessor pair, as specified by |details|.
7796 // Returns either the original value, or the result of the getter call.
7797 TNode<Object> CodeStubAssembler::CallGetterIfAccessor(
7798     Node* value, Node* details, Node* context, Node* receiver,
7799     Label* if_bailout, GetOwnPropertyMode mode) {
7800   VARIABLE(var_value, MachineRepresentation::kTagged, value);
7801   Label done(this), if_accessor_info(this, Label::kDeferred);
7802 
7803   Node* kind = DecodeWord32<PropertyDetails::KindField>(details);
7804   GotoIf(Word32Equal(kind, Int32Constant(kData)), &done);
7805 
7806   // Accessor case.
7807   GotoIfNot(IsAccessorPair(value), &if_accessor_info);
7808 
7809   // AccessorPair case.
7810   {
7811     if (mode == kCallJSGetter) {
7812       Node* accessor_pair = value;
7813       Node* getter =
7814           LoadObjectField(accessor_pair, AccessorPair::kGetterOffset);
7815       Node* getter_map = LoadMap(getter);
7816       Node* instance_type = LoadMapInstanceType(getter_map);
7817       // FunctionTemplateInfo getters are not supported yet.
7818       GotoIf(InstanceTypeEqual(instance_type, FUNCTION_TEMPLATE_INFO_TYPE),
7819              if_bailout);
7820 
7821       // Return undefined if the {getter} is not callable.
7822       var_value.Bind(UndefinedConstant());
7823       GotoIfNot(IsCallableMap(getter_map), &done);
7824 
7825       // Call the accessor.
7826       Callable callable = CodeFactory::Call(isolate());
7827       Node* result = CallJS(callable, context, getter, receiver);
7828       var_value.Bind(result);
7829     }
7830     Goto(&done);
7831   }
7832 
7833   // AccessorInfo case.
7834   BIND(&if_accessor_info);
7835   {
7836     Node* accessor_info = value;
7837     CSA_ASSERT(this, IsAccessorInfo(value));
7838     CSA_ASSERT(this, TaggedIsNotSmi(receiver));
7839     Label if_array(this), if_function(this), if_value(this);
7840 
7841     // Dispatch based on {receiver} instance type.
7842     Node* receiver_map = LoadMap(receiver);
7843     Node* receiver_instance_type = LoadMapInstanceType(receiver_map);
7844     GotoIf(IsJSArrayInstanceType(receiver_instance_type), &if_array);
7845     GotoIf(IsJSFunctionInstanceType(receiver_instance_type), &if_function);
7846     Branch(IsJSValueInstanceType(receiver_instance_type), &if_value,
7847            if_bailout);
7848 
7849     // JSArray AccessorInfo case.
7850     BIND(&if_array);
7851     {
7852       // We only deal with the "length" accessor on JSArray.
7853       GotoIfNot(IsLengthString(
7854                     LoadObjectField(accessor_info, AccessorInfo::kNameOffset)),
7855                 if_bailout);
7856       var_value.Bind(LoadJSArrayLength(receiver));
7857       Goto(&done);
7858     }
7859 
7860     // JSFunction AccessorInfo case.
7861     BIND(&if_function);
7862     {
7863       // We only deal with the "prototype" accessor on JSFunction here.
7864       GotoIfNot(IsPrototypeString(
7865                     LoadObjectField(accessor_info, AccessorInfo::kNameOffset)),
7866                 if_bailout);
7867 
7868       // if (!(has_prototype_slot() && !has_non_instance_prototype())) use
7869       // generic property loading mechanism.
7870       GotoIfNot(
7871           Word32Equal(
7872               Word32And(LoadMapBitField(receiver_map),
7873                         Int32Constant(Map::HasPrototypeSlotBit::kMask |
7874                                       Map::HasNonInstancePrototypeBit::kMask)),
7875               Int32Constant(Map::HasPrototypeSlotBit::kMask)),
7876           if_bailout);
7877       var_value.Bind(LoadJSFunctionPrototype(receiver, if_bailout));
7878       Goto(&done);
7879     }
7880 
7881     // JSValue AccessorInfo case.
7882     BIND(&if_value);
7883     {
7884       // We only deal with the "length" accessor on JSValue string wrappers.
7885       GotoIfNot(IsLengthString(
7886                     LoadObjectField(accessor_info, AccessorInfo::kNameOffset)),
7887                 if_bailout);
7888       Node* receiver_value = LoadJSValueValue(receiver);
7889       GotoIfNot(TaggedIsNotSmi(receiver_value), if_bailout);
7890       GotoIfNot(IsString(receiver_value), if_bailout);
7891       var_value.Bind(LoadStringLengthAsSmi(receiver_value));
7892       Goto(&done);
7893     }
7894   }
7895 
7896   BIND(&done);
7897   return UncheckedCast<Object>(var_value.value());
7898 }
7899 
7900 void CodeStubAssembler::TryGetOwnProperty(
7901     Node* context, Node* receiver, Node* object, Node* map, Node* instance_type,
7902     Node* unique_name, Label* if_found_value, Variable* var_value,
7903     Label* if_not_found, Label* if_bailout) {
7904   TryGetOwnProperty(context, receiver, object, map, instance_type, unique_name,
7905                     if_found_value, var_value, nullptr, nullptr, if_not_found,
7906                     if_bailout, kCallJSGetter);
7907 }
7908 
7909 void CodeStubAssembler::TryGetOwnProperty(
7910     Node* context, Node* receiver, Node* object, Node* map, Node* instance_type,
7911     Node* unique_name, Label* if_found_value, Variable* var_value,
7912     Variable* var_details, Variable* var_raw_value, Label* if_not_found,
7913     Label* if_bailout, GetOwnPropertyMode mode) {
7914   DCHECK_EQ(MachineRepresentation::kTagged, var_value->rep());
7915   Comment("TryGetOwnProperty");
7916 
7917   TVARIABLE(HeapObject, var_meta_storage);
7918   TVARIABLE(IntPtrT, var_entry);
7919 
7920   Label if_found_fast(this), if_found_dict(this), if_found_global(this);
7921 
7922   VARIABLE(local_var_details, MachineRepresentation::kWord32);
7923   if (!var_details) {
7924     var_details = &local_var_details;
7925   }
7926   Label if_found(this);
7927 
7928   TryLookupProperty(object, map, instance_type, unique_name, &if_found_fast,
7929                     &if_found_dict, &if_found_global, &var_meta_storage,
7930                     &var_entry, if_not_found, if_bailout);
7931   BIND(&if_found_fast);
7932   {
7933     Node* descriptors = var_meta_storage.value();
7934     Node* name_index = var_entry.value();
7935 
7936     LoadPropertyFromFastObject(object, map, descriptors, name_index,
7937                                var_details, var_value);
7938     Goto(&if_found);
7939   }
7940   BIND(&if_found_dict);
7941   {
7942     Node* dictionary = var_meta_storage.value();
7943     Node* entry = var_entry.value();
7944     LoadPropertyFromNameDictionary(dictionary, entry, var_details, var_value);
7945     Goto(&if_found);
7946   }
7947   BIND(&if_found_global);
7948   {
7949     Node* dictionary = var_meta_storage.value();
7950     Node* entry = var_entry.value();
7951 
7952     LoadPropertyFromGlobalDictionary(dictionary, entry, var_details, var_value,
7953                                      if_not_found);
7954     Goto(&if_found);
7955   }
7956   // Here we have details and value which could be an accessor.
7957   BIND(&if_found);
7958   {
7959     // TODO(ishell): Execute C++ accessor in case of accessor info
7960     if (var_raw_value) {
7961       var_raw_value->Bind(var_value->value());
7962     }
7963     Node* value = CallGetterIfAccessor(var_value->value(), var_details->value(),
7964                                        context, receiver, if_bailout, mode);
7965     var_value->Bind(value);
7966     Goto(if_found_value);
7967   }
7968 }
7969 
7970 void CodeStubAssembler::TryLookupElement(Node* object, Node* map,
7971                                          SloppyTNode<Int32T> instance_type,
7972                                          SloppyTNode<IntPtrT> intptr_index,
7973                                          Label* if_found, Label* if_absent,
7974                                          Label* if_not_found,
7975                                          Label* if_bailout) {
7976   // Handle special objects in runtime.
7977   GotoIf(IsSpecialReceiverInstanceType(instance_type), if_bailout);
7978 
7979   Node* elements_kind = LoadMapElementsKind(map);
7980 
7981   // TODO(verwaest): Support other elements kinds as well.
7982   Label if_isobjectorsmi(this), if_isdouble(this), if_isdictionary(this),
7983       if_isfaststringwrapper(this), if_isslowstringwrapper(this), if_oob(this),
7984       if_typedarray(this);
7985   // clang-format off
7986   int32_t values[] = {
7987       // Handled by {if_isobjectorsmi}.
7988       PACKED_SMI_ELEMENTS, HOLEY_SMI_ELEMENTS, PACKED_ELEMENTS,
7989           HOLEY_ELEMENTS,
7990       // Handled by {if_isdouble}.
7991       PACKED_DOUBLE_ELEMENTS, HOLEY_DOUBLE_ELEMENTS,
7992       // Handled by {if_isdictionary}.
7993       DICTIONARY_ELEMENTS,
7994       // Handled by {if_isfaststringwrapper}.
7995       FAST_STRING_WRAPPER_ELEMENTS,
7996       // Handled by {if_isslowstringwrapper}.
7997       SLOW_STRING_WRAPPER_ELEMENTS,
7998       // Handled by {if_not_found}.
7999       NO_ELEMENTS,
8000       // Handled by {if_typedarray}.
8001       UINT8_ELEMENTS,
8002       INT8_ELEMENTS,
8003       UINT16_ELEMENTS,
8004       INT16_ELEMENTS,
8005       UINT32_ELEMENTS,
8006       INT32_ELEMENTS,
8007       FLOAT32_ELEMENTS,
8008       FLOAT64_ELEMENTS,
8009       UINT8_CLAMPED_ELEMENTS,
8010       BIGUINT64_ELEMENTS,
8011       BIGINT64_ELEMENTS,
8012   };
8013   Label* labels[] = {
8014       &if_isobjectorsmi, &if_isobjectorsmi, &if_isobjectorsmi,
8015           &if_isobjectorsmi,
8016       &if_isdouble, &if_isdouble,
8017       &if_isdictionary,
8018       &if_isfaststringwrapper,
8019       &if_isslowstringwrapper,
8020       if_not_found,
8021       &if_typedarray,
8022       &if_typedarray,
8023       &if_typedarray,
8024       &if_typedarray,
8025       &if_typedarray,
8026       &if_typedarray,
8027       &if_typedarray,
8028       &if_typedarray,
8029       &if_typedarray,
8030       &if_typedarray,
8031       &if_typedarray,
8032   };
8033   // clang-format on
8034   STATIC_ASSERT(arraysize(values) == arraysize(labels));
8035   Switch(elements_kind, if_bailout, values, labels, arraysize(values));
8036 
8037   BIND(&if_isobjectorsmi);
8038   {
8039     Node* elements = LoadElements(object);
8040     Node* length = LoadAndUntagFixedArrayBaseLength(elements);
8041 
8042     GotoIfNot(UintPtrLessThan(intptr_index, length), &if_oob);
8043 
8044     Node* element = LoadFixedArrayElement(elements, intptr_index);
8045     Node* the_hole = TheHoleConstant();
8046     Branch(WordEqual(element, the_hole), if_not_found, if_found);
8047   }
8048   BIND(&if_isdouble);
8049   {
8050     Node* elements = LoadElements(object);
8051     Node* length = LoadAndUntagFixedArrayBaseLength(elements);
8052 
8053     GotoIfNot(UintPtrLessThan(intptr_index, length), &if_oob);
8054 
8055     // Check if the element is a double hole, but don't load it.
8056     LoadFixedDoubleArrayElement(elements, intptr_index, MachineType::None(), 0,
8057                                 INTPTR_PARAMETERS, if_not_found);
8058     Goto(if_found);
8059   }
8060   BIND(&if_isdictionary);
8061   {
8062     // Negative keys must be converted to property names.
8063     GotoIf(IntPtrLessThan(intptr_index, IntPtrConstant(0)), if_bailout);
8064 
8065     TVARIABLE(IntPtrT, var_entry);
8066     TNode<NumberDictionary> elements = CAST(LoadElements(object));
8067     NumberDictionaryLookup(elements, intptr_index, if_found, &var_entry,
8068                            if_not_found);
8069   }
8070   BIND(&if_isfaststringwrapper);
8071   {
8072     CSA_ASSERT(this, HasInstanceType(object, JS_VALUE_TYPE));
8073     Node* string = LoadJSValueValue(object);
8074     CSA_ASSERT(this, IsString(string));
8075     Node* length = LoadStringLengthAsWord(string);
8076     GotoIf(UintPtrLessThan(intptr_index, length), if_found);
8077     Goto(&if_isobjectorsmi);
8078   }
8079   BIND(&if_isslowstringwrapper);
8080   {
8081     CSA_ASSERT(this, HasInstanceType(object, JS_VALUE_TYPE));
8082     Node* string = LoadJSValueValue(object);
8083     CSA_ASSERT(this, IsString(string));
8084     Node* length = LoadStringLengthAsWord(string);
8085     GotoIf(UintPtrLessThan(intptr_index, length), if_found);
8086     Goto(&if_isdictionary);
8087   }
8088   BIND(&if_typedarray);
8089   {
8090     Node* buffer = LoadObjectField(object, JSArrayBufferView::kBufferOffset);
8091     GotoIf(IsDetachedBuffer(buffer), if_absent);
8092 
8093     Node* length = TryToIntptr(
8094         LoadObjectField(object, JSTypedArray::kLengthOffset), if_bailout);
8095     Branch(UintPtrLessThan(intptr_index, length), if_found, if_absent);
8096   }
8097   BIND(&if_oob);
8098   {
8099     // Positive OOB indices mean "not found"; negative indices must be
8100     // converted to property names.
8101     GotoIf(IntPtrLessThan(intptr_index, IntPtrConstant(0)), if_bailout);
8102     Goto(if_not_found);
8103   }
8104 }
8105 
8106 void CodeStubAssembler::BranchIfMaybeSpecialIndex(TNode<String> name_string,
8107                                                   Label* if_maybe_special_index,
8108                                                   Label* if_not_special_index) {
8109   // TODO(cwhan.tunz): Implement fast cases more.
8110 
8111   // If a name is empty or too long, it's not a special index
8112   // Max length of canonical double: -X.XXXXXXXXXXXXXXXXX-eXXX
8113   const int kBufferSize = 24;
8114   TNode<Smi> string_length = LoadStringLengthAsSmi(name_string);
8115   GotoIf(SmiEqual(string_length, SmiConstant(0)), if_not_special_index);
8116   GotoIf(SmiGreaterThan(string_length, SmiConstant(kBufferSize)),
8117          if_not_special_index);
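  // Illustrative outcomes of the character checks below: "-1", "NaN",
  // "Infinity" and "4294967295" all reach {if_maybe_special_index}, while
  // "foo" goes straight to {if_not_special_index}.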
8118 
8119   // If the first character of name is not a digit or '-', or we can't match it
8120   // to Infinity or NaN, then this is not a special index.
8121   TNode<Int32T> first_char = StringCharCodeAt(name_string, IntPtrConstant(0));
8122   // If the name starts with '-', it can be a negative index.
8123   GotoIf(Word32Equal(first_char, Int32Constant('-')), if_maybe_special_index);
8124   // If the name starts with 'I', it can be "Infinity".
8125   GotoIf(Word32Equal(first_char, Int32Constant('I')), if_maybe_special_index);
8126   // If the name starts with 'N', it can be "NaN".
8127   GotoIf(Word32Equal(first_char, Int32Constant('N')), if_maybe_special_index);
8128   // Finally, if the first character is not a digit either, then we are sure
8129   // that the name is not a special index.
8130   GotoIf(Uint32LessThan(first_char, Int32Constant('0')), if_not_special_index);
8131   GotoIf(Uint32LessThan(Int32Constant('9'), first_char), if_not_special_index);
8132   Goto(if_maybe_special_index);
8133 }
8134 
8135 void CodeStubAssembler::TryPrototypeChainLookup(
8136     Node* receiver, Node* key, const LookupInHolder& lookup_property_in_holder,
8137     const LookupInHolder& lookup_element_in_holder, Label* if_end,
8138     Label* if_bailout, Label* if_proxy) {
8139   // Ensure the receiver is a JSReceiver; otherwise bail out.
8140   Label if_objectisnotsmi(this);
8141   Branch(TaggedIsSmi(receiver), if_bailout, &if_objectisnotsmi);
8142   BIND(&if_objectisnotsmi);
8143 
8144   Node* map = LoadMap(receiver);
8145   Node* instance_type = LoadMapInstanceType(map);
8146   {
8147     Label if_objectisreceiver(this);
8148     STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
8149     STATIC_ASSERT(FIRST_JS_RECEIVER_TYPE == JS_PROXY_TYPE);
8150     Branch(IsJSReceiverInstanceType(instance_type), &if_objectisreceiver,
8151            if_bailout);
8152     BIND(&if_objectisreceiver);
8153 
8154     if (if_proxy) {
8155       GotoIf(InstanceTypeEqual(instance_type, JS_PROXY_TYPE), if_proxy);
8156     }
8157   }
8158 
8159   VARIABLE(var_index, MachineType::PointerRepresentation());
8160   VARIABLE(var_unique, MachineRepresentation::kTagged);
8161 
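  // TryToName() classifies {key}: array indices continue at {if_keyisindex}
  // with the untagged index in {var_index}, and unique names continue at
  // {if_iskeyunique} with the name in {var_unique}; keys it cannot classify
  // cheaply bail out.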
8162   Label if_keyisindex(this), if_iskeyunique(this);
8163   TryToName(key, &if_keyisindex, &var_index, &if_iskeyunique, &var_unique,
8164             if_bailout);
8165 
8166   BIND(&if_iskeyunique);
8167   {
8168     VARIABLE(var_holder, MachineRepresentation::kTagged, receiver);
8169     VARIABLE(var_holder_map, MachineRepresentation::kTagged, map);
8170     VARIABLE(var_holder_instance_type, MachineRepresentation::kWord32,
8171              instance_type);
8172 
8173     Variable* merged_variables[] = {&var_holder, &var_holder_map,
8174                                     &var_holder_instance_type};
8175     Label loop(this, arraysize(merged_variables), merged_variables);
8176     Goto(&loop);
8177     BIND(&loop);
8178     {
8179       Node* holder_map = var_holder_map.value();
8180       Node* holder_instance_type = var_holder_instance_type.value();
8181 
8182       Label next_proto(this), check_integer_indexed_exotic(this);
8183       lookup_property_in_holder(receiver, var_holder.value(), holder_map,
8184                                 holder_instance_type, var_unique.value(),
8185                                 &check_integer_indexed_exotic, if_bailout);
8186 
8187       BIND(&check_integer_indexed_exotic);
8188       {
8189         // Bail out if this could be an integer-indexed exotic case.
8190         GotoIfNot(InstanceTypeEqual(holder_instance_type, JS_TYPED_ARRAY_TYPE),
8191                   &next_proto);
8192         GotoIfNot(IsString(var_unique.value()), &next_proto);
8193         BranchIfMaybeSpecialIndex(CAST(var_unique.value()), if_bailout,
8194                                   &next_proto);
8195       }
8196 
8197       BIND(&next_proto);
8198 
8199       Node* proto = LoadMapPrototype(holder_map);
8200 
8201       GotoIf(IsNull(proto), if_end);
8202 
8203       Node* map = LoadMap(proto);
8204       Node* instance_type = LoadMapInstanceType(map);
8205 
8206       var_holder.Bind(proto);
8207       var_holder_map.Bind(map);
8208       var_holder_instance_type.Bind(instance_type);
8209       Goto(&loop);
8210     }
8211   }
8212   BIND(&if_keyisindex);
8213   {
8214     VARIABLE(var_holder, MachineRepresentation::kTagged, receiver);
8215     VARIABLE(var_holder_map, MachineRepresentation::kTagged, map);
8216     VARIABLE(var_holder_instance_type, MachineRepresentation::kWord32,
8217              instance_type);
8218 
8219     Variable* merged_variables[] = {&var_holder, &var_holder_map,
8220                                     &var_holder_instance_type};
8221     Label loop(this, arraysize(merged_variables), merged_variables);
8222     Goto(&loop);
8223     BIND(&loop);
8224     {
8225       Label next_proto(this);
8226       lookup_element_in_holder(receiver, var_holder.value(),
8227                                var_holder_map.value(),
8228                                var_holder_instance_type.value(),
8229                                var_index.value(), &next_proto, if_bailout);
8230       BIND(&next_proto);
8231 
8232       Node* proto = LoadMapPrototype(var_holder_map.value());
8233 
8234       GotoIf(IsNull(proto), if_end);
8235 
8236       Node* map = LoadMap(proto);
8237       Node* instance_type = LoadMapInstanceType(map);
8238 
8239       var_holder.Bind(proto);
8240       var_holder_map.Bind(map);
8241       var_holder_instance_type.Bind(instance_type);
8242       Goto(&loop);
8243     }
8244   }
8245 }
8246 
8247 Node* CodeStubAssembler::HasInPrototypeChain(Node* context, Node* object,
8248                                              Node* prototype) {
8249   CSA_ASSERT(this, TaggedIsNotSmi(object));
8250   VARIABLE(var_result, MachineRepresentation::kTagged);
8251   Label return_false(this), return_true(this),
8252       return_runtime(this, Label::kDeferred), return_result(this);
8253 
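  // JS-level intuition (illustrative): for o = Object.create(p), the walk
  // below finds {p} after one hop and returns true; proxies and maps that
  // need access checks or have named interceptors defer to the runtime.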
8254   // Loop through the prototype chain looking for the {prototype}.
8255   VARIABLE(var_object_map, MachineRepresentation::kTagged, LoadMap(object));
8256   Label loop(this, &var_object_map);
8257   Goto(&loop);
8258   BIND(&loop);
8259   {
8260     // Check if we can determine the prototype directly from the {object_map}.
8261     Label if_objectisdirect(this), if_objectisspecial(this, Label::kDeferred);
8262     Node* object_map = var_object_map.value();
8263     TNode<Int32T> object_instance_type = LoadMapInstanceType(object_map);
8264     Branch(IsSpecialReceiverInstanceType(object_instance_type),
8265            &if_objectisspecial, &if_objectisdirect);
8266     BIND(&if_objectisspecial);
8267     {
8268       // The {object_map} is a special receiver map or a primitive map, check
8269       // if we need to use the if_objectisspecial path in the runtime.
8270       GotoIf(InstanceTypeEqual(object_instance_type, JS_PROXY_TYPE),
8271              &return_runtime);
8272       Node* object_bitfield = LoadMapBitField(object_map);
8273       int mask = Map::HasNamedInterceptorBit::kMask |
8274                  Map::IsAccessCheckNeededBit::kMask;
8275       Branch(IsSetWord32(object_bitfield, mask), &return_runtime,
8276              &if_objectisdirect);
8277     }
8278     BIND(&if_objectisdirect);
8279 
8280     // Check the current {object} prototype.
8281     Node* object_prototype = LoadMapPrototype(object_map);
8282     GotoIf(IsNull(object_prototype), &return_false);
8283     GotoIf(WordEqual(object_prototype, prototype), &return_true);
8284 
8285     // Continue with the prototype.
8286     CSA_ASSERT(this, TaggedIsNotSmi(object_prototype));
8287     var_object_map.Bind(LoadMap(object_prototype));
8288     Goto(&loop);
8289   }
8290 
8291   BIND(&return_true);
8292   var_result.Bind(TrueConstant());
8293   Goto(&return_result);
8294 
8295   BIND(&return_false);
8296   var_result.Bind(FalseConstant());
8297   Goto(&return_result);
8298 
8299   BIND(&return_runtime);
8300   {
8301     // Fallback to the runtime implementation.
8302     var_result.Bind(
8303         CallRuntime(Runtime::kHasInPrototypeChain, context, object, prototype));
8304   }
8305   Goto(&return_result);
8306 
8307   BIND(&return_result);
8308   return var_result.value();
8309 }
8310 
8311 Node* CodeStubAssembler::OrdinaryHasInstance(Node* context, Node* callable,
8312                                              Node* object) {
8313   VARIABLE(var_result, MachineRepresentation::kTagged);
8314   Label return_runtime(this, Label::kDeferred), return_result(this);
8315 
8316   // Goto runtime if {object} is a Smi.
8317   GotoIf(TaggedIsSmi(object), &return_runtime);
8318 
8319   // Goto runtime if {callable} is a Smi.
8320   GotoIf(TaggedIsSmi(callable), &return_runtime);
8321 
8322   // Load map of {callable}.
8323   Node* callable_map = LoadMap(callable);
8324 
8325   // Goto runtime if {callable} is not a JSFunction.
8326   Node* callable_instance_type = LoadMapInstanceType(callable_map);
8327   GotoIfNot(InstanceTypeEqual(callable_instance_type, JS_FUNCTION_TYPE),
8328             &return_runtime);
8329 
8330   // Goto runtime if {callable} is not a constructor or has
8331   // a non-instance "prototype".
8332   Node* callable_bitfield = LoadMapBitField(callable_map);
8333   GotoIfNot(Word32Equal(
8334                 Word32And(callable_bitfield,
8335                           Int32Constant(Map::HasNonInstancePrototypeBit::kMask |
8336                                         Map::IsConstructorBit::kMask)),
8337                 Int32Constant(Map::IsConstructorBit::kMask)),
8338             &return_runtime);
8339 
8340   // Get the "prototype" (or initial map) of the {callable}.
8341   Node* callable_prototype =
8342       LoadObjectField(callable, JSFunction::kPrototypeOrInitialMapOffset);
8343   {
8344     Label callable_prototype_valid(this);
8345     VARIABLE(var_callable_prototype, MachineRepresentation::kTagged,
8346              callable_prototype);
8347 
8348     // Resolve the "prototype" if the {callable} has an initial map.  Afterwards
8349     // the {callable_prototype} will be either the JSReceiver prototype object
8350     // or the hole value, which means that no instances of the {callable} were
8351     // created so far and hence we should return false.
8352     Node* callable_prototype_instance_type =
8353         LoadInstanceType(callable_prototype);
8354     GotoIfNot(InstanceTypeEqual(callable_prototype_instance_type, MAP_TYPE),
8355               &callable_prototype_valid);
8356     var_callable_prototype.Bind(
8357         LoadObjectField(callable_prototype, Map::kPrototypeOffset));
8358     Goto(&callable_prototype_valid);
8359     BIND(&callable_prototype_valid);
8360     callable_prototype = var_callable_prototype.value();
8361   }
8362 
8363   // Loop through the prototype chain looking for the {callable} prototype.
8364   var_result.Bind(HasInPrototypeChain(context, object, callable_prototype));
8365   Goto(&return_result);
8366 
8367   BIND(&return_runtime);
8368   {
8369     // Fallback to the runtime implementation.
8370     var_result.Bind(
8371         CallRuntime(Runtime::kOrdinaryHasInstance, context, callable, object));
8372   }
8373   Goto(&return_result);
8374 
8375   BIND(&return_result);
8376   return var_result.value();
8377 }
8378 
8379 TNode<IntPtrT> CodeStubAssembler::ElementOffsetFromIndex(Node* index_node,
8380                                                          ElementsKind kind,
8381                                                          ParameterMode mode,
8382                                                          int base_size) {
8383   CSA_SLOW_ASSERT(this, MatchesParameterMode(index_node, mode));
8384   int element_size_shift = ElementsKindToShiftSize(kind);
8385   int element_size = 1 << element_size_shift;
8386   int const kSmiShiftBits = kSmiShiftSize + kSmiTagSize;
8387   intptr_t index = 0;
8388   bool constant_index = false;
8389   if (mode == SMI_PARAMETERS) {
8390     element_size_shift -= kSmiShiftBits;
8391     Smi* smi_index;
8392     constant_index = ToSmiConstant(index_node, smi_index);
8393     if (constant_index) index = smi_index->value();
8394     index_node = BitcastTaggedToWord(index_node);
8395   } else {
8396     DCHECK(mode == INTPTR_PARAMETERS);
8397     constant_index = ToIntPtrConstant(index_node, index);
8398   }
8399   if (constant_index) {
8400     return IntPtrConstant(base_size + element_size * index);
8401   }
8402 
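  // Illustrative (assuming a 64-bit target, i.e. a shift size of 3 for
  // PACKED_ELEMENTS): a non-constant INTPTR_PARAMETERS index i yields
  // base_size + (i << 3); for SMI_PARAMETERS the Smi shift was already folded
  // into element_size_shift above, so the tagged value is shifted directly.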
8403   TNode<WordT> shifted_index =
8404       (element_size_shift == 0)
8405           ? UncheckedCast<WordT>(index_node)
8406           : ((element_size_shift > 0)
8407                  ? WordShl(index_node, IntPtrConstant(element_size_shift))
8408                  : WordSar(index_node, IntPtrConstant(-element_size_shift)));
8409   return IntPtrAdd(IntPtrConstant(base_size), Signed(shifted_index));
8410 }
8411 
8412 TNode<BoolT> CodeStubAssembler::IsOffsetInBounds(SloppyTNode<IntPtrT> offset,
8413                                                  SloppyTNode<IntPtrT> length,
8414                                                  int header_size,
8415                                                  ElementsKind kind) {
8416   // Make sure we point to the last field.
8417   int element_size = 1 << ElementsKindToShiftSize(kind);
8418   int correction = header_size - kHeapObjectTag - element_size;
8419   TNode<IntPtrT> last_offset =
8420       ElementOffsetFromIndex(length, kind, INTPTR_PARAMETERS, correction);
8421   return IntPtrLessThanOrEqual(offset, last_offset);
8422 }
8423 
8424 Node* CodeStubAssembler::LoadFeedbackVector(Node* closure) {
8425   Node* feedback_cell =
8426       LoadObjectField(closure, JSFunction::kFeedbackCellOffset);
8427   CSA_ASSERT(this, IsFeedbackCell(feedback_cell));
8428   return LoadObjectField(feedback_cell, FeedbackCell::kValueOffset);
8429 }
8430 
8431 Node* CodeStubAssembler::LoadFeedbackVectorForStub() {
8432   Node* function =
8433       LoadFromParentFrame(JavaScriptFrameConstants::kFunctionOffset);
8434   return LoadFeedbackVector(function);
8435 }
8436 
8437 void CodeStubAssembler::UpdateFeedback(Node* feedback, Node* feedback_vector,
8438                                        Node* slot_id) {
8439   // This method is used for binary op and compare feedback. The
8440   // corresponding feedback slots are initialized with Smi zero, so we can
8441   // simply OR the new feedback in place.
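  // Feedback constants (e.g. BinaryOperationFeedback) form a bit-set lattice,
  // so the OR below only ever adds bits; the combined value is still a Smi,
  // which is why the store can skip the write barrier.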
8442   TNode<MaybeObject> feedback_element =
8443       LoadFeedbackVectorSlot(feedback_vector, slot_id);
8444   CSA_ASSERT(this, IsObject(feedback_element));
8445   TNode<Smi> previous_feedback = CAST(ToObject(feedback_element));
8446   TNode<Smi> combined_feedback = SmiOr(previous_feedback, CAST(feedback));
8447   Label end(this);
8448 
8449   GotoIf(SmiEqual(previous_feedback, combined_feedback), &end);
8450   {
8451     StoreFeedbackVectorSlot(feedback_vector, slot_id, combined_feedback,
8452                             SKIP_WRITE_BARRIER);
8453     ReportFeedbackUpdate(feedback_vector, slot_id, "UpdateFeedback");
8454     Goto(&end);
8455   }
8456 
8457   BIND(&end);
8458 }
8459 
8460 void CodeStubAssembler::ReportFeedbackUpdate(
8461     SloppyTNode<FeedbackVector> feedback_vector, SloppyTNode<IntPtrT> slot_id,
8462     const char* reason) {
8463   // Reset profiler ticks.
8464   StoreObjectFieldNoWriteBarrier(
8465       feedback_vector, FeedbackVector::kProfilerTicksOffset, Int32Constant(0),
8466       MachineRepresentation::kWord32);
8467 
8468 #ifdef V8_TRACE_FEEDBACK_UPDATES
8469   // Trace the update.
8470   CallRuntime(Runtime::kInterpreterTraceUpdateFeedback, NoContextConstant(),
8471               LoadFromParentFrame(JavaScriptFrameConstants::kFunctionOffset),
8472               SmiTag(slot_id), StringConstant(reason));
8473 #endif  // V8_TRACE_FEEDBACK_UPDATES
8474 }
8475 
8476 void CodeStubAssembler::OverwriteFeedback(Variable* existing_feedback,
8477                                           int new_feedback) {
8478   if (existing_feedback == nullptr) return;
8479   existing_feedback->Bind(SmiConstant(new_feedback));
8480 }
8481 
8482 void CodeStubAssembler::CombineFeedback(Variable* existing_feedback,
8483                                         int feedback) {
8484   if (existing_feedback == nullptr) return;
8485   existing_feedback->Bind(
8486       SmiOr(CAST(existing_feedback->value()), SmiConstant(feedback)));
8487 }
8488 
8489 void CodeStubAssembler::CombineFeedback(Variable* existing_feedback,
8490                                         Node* feedback) {
8491   if (existing_feedback == nullptr) return;
8492   existing_feedback->Bind(
8493       SmiOr(CAST(existing_feedback->value()), CAST(feedback)));
8494 }
8495 
8496 void CodeStubAssembler::CheckForAssociatedProtector(Node* name,
8497                                                     Label* if_protector) {
8498   // This list must be kept in sync with LookupIterator::UpdateProtector!
8499   // TODO(jkummerow): Would it be faster to have a bit in Symbol::flags()?
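  // These are the names guarded by protector cells; a store to one of them on
  // the relevant objects may need to invalidate a protector, so callers are
  // expected to treat {if_protector} as a slow path (typically an IC miss).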
8500   GotoIf(WordEqual(name, LoadRoot(Heap::kconstructor_stringRootIndex)),
8501          if_protector);
8502   GotoIf(WordEqual(name, LoadRoot(Heap::kiterator_symbolRootIndex)),
8503          if_protector);
8504   GotoIf(WordEqual(name, LoadRoot(Heap::knext_stringRootIndex)), if_protector);
8505   GotoIf(WordEqual(name, LoadRoot(Heap::kspecies_symbolRootIndex)),
8506          if_protector);
8507   GotoIf(WordEqual(name, LoadRoot(Heap::kis_concat_spreadable_symbolRootIndex)),
8508          if_protector);
8509   GotoIf(WordEqual(name, LoadRoot(Heap::kresolve_stringRootIndex)),
8510          if_protector);
8511   GotoIf(WordEqual(name, LoadRoot(Heap::kthen_stringRootIndex)), if_protector);
8512   // Fall through if no case matched.
8513 }
8514 
8515 TNode<Map> CodeStubAssembler::LoadReceiverMap(SloppyTNode<Object> receiver) {
8516   return Select<Map>(
8517       TaggedIsSmi(receiver),
8518       [=] { return CAST(LoadRoot(Heap::kHeapNumberMapRootIndex)); },
8519       [=] { return LoadMap(UncheckedCast<HeapObject>(receiver)); });
8520 }
8521 
8522 TNode<IntPtrT> CodeStubAssembler::TryToIntptr(Node* key, Label* miss) {
8523   TVARIABLE(IntPtrT, var_intptr_key);
8524   Label done(this, &var_intptr_key), key_is_smi(this);
8525   GotoIf(TaggedIsSmi(key), &key_is_smi);
8526   // Try to convert a heap number to a Smi.
8527   GotoIfNot(IsHeapNumber(key), miss);
8528   {
8529     TNode<Float64T> value = LoadHeapNumberValue(key);
8530     TNode<Int32T> int_value = RoundFloat64ToInt32(value);
8531     GotoIfNot(Float64Equal(value, ChangeInt32ToFloat64(int_value)), miss);
8532     var_intptr_key = ChangeInt32ToIntPtr(int_value);
8533     Goto(&done);
8534   }
8535 
8536   BIND(&key_is_smi);
8537   {
8538     var_intptr_key = SmiUntag(key);
8539     Goto(&done);
8540   }
8541 
8542   BIND(&done);
8543   return var_intptr_key.value();
8544 }
8545 
8546 Node* CodeStubAssembler::EmitKeyedSloppyArguments(Node* receiver, Node* key,
8547                                                   Node* value, Label* bailout) {
8548   // Mapped arguments are actual arguments. Unmapped arguments are values added
8549   // to the arguments object after it was created for the call. Mapped arguments
8550   // are stored in the context at indexes given by elements[key + 2]. Unmapped
8551   // arguments are stored as regular indexed properties in the arguments array,
8552   // held at elements[1]. See NewSloppyArguments() in runtime.cc for a detailed
8553   // look at argument object construction.
8554   //
8555   // The sloppy arguments elements array has a special format:
8556   //
8557   // 0: context
8558   // 1: unmapped arguments array
8559   // 2: mapped_index0,
8560   // 3: mapped_index1,
8561   // ...
8562   //
8563   // length is 2 + min(number_of_actual_arguments, number_of_formal_arguments).
8564   // If key + 2 >= elements.length then attempt to look in the unmapped
8565   // arguments array (given by elements[1]) and return the value at key,
8566   // missing (i.e. bailing out) to the runtime if the unmapped arguments array
8567   // is not a fixed array or if key >= unmapped_arguments_array.length.
8568   //
8569   // Otherwise, t = elements[key + 2]. If t is the hole, then look up the value
8570   // in the unmapped arguments array, as described above. Otherwise, t is a Smi
8571   // index into the context array given at elements[0]. Return the value at
8572   // context[t].
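  //
  // As a concrete illustration: for a sloppy-mode function with two formal
  // parameters called with three arguments, length is 2 + min(3, 2) == 4 and
  // the elements array is [context, unmapped arguments array, slot_of_param0,
  // slot_of_param1]. A load of key 2 (the third argument) has key + 2 >=
  // length and is therefore served from the unmapped arguments array, while
  // keys 0 and 1 normally go through the context slots recorded at
  // elements[2] and elements[3].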
8573 
8574   bool is_load = value == nullptr;
8575 
8576   GotoIfNot(TaggedIsSmi(key), bailout);
8577   key = SmiUntag(key);
8578   GotoIf(IntPtrLessThan(key, IntPtrConstant(0)), bailout);
8579 
8580   Node* elements = LoadElements(receiver);
8581   Node* elements_length = LoadAndUntagFixedArrayBaseLength(elements);
8582 
8583   VARIABLE(var_result, MachineRepresentation::kTagged);
8584   if (!is_load) {
8585     var_result.Bind(value);
8586   }
8587   Label if_mapped(this), if_unmapped(this), end(this, &var_result);
8588   Node* intptr_two = IntPtrConstant(2);
8589   Node* adjusted_length = IntPtrSub(elements_length, intptr_two);
8590 
8591   GotoIf(UintPtrGreaterThanOrEqual(key, adjusted_length), &if_unmapped);
8592 
8593   Node* mapped_index =
8594       LoadFixedArrayElement(elements, IntPtrAdd(key, intptr_two));
8595   Branch(WordEqual(mapped_index, TheHoleConstant()), &if_unmapped, &if_mapped);
8596 
8597   BIND(&if_mapped);
8598   {
8599     CSA_ASSERT(this, TaggedIsSmi(mapped_index));
8600     mapped_index = SmiUntag(mapped_index);
8601     Node* the_context = LoadFixedArrayElement(elements, 0);
8602     // Assert that we can use LoadFixedArrayElement/StoreFixedArrayElement
8603     // methods for accessing Context.
8604     STATIC_ASSERT(Context::kHeaderSize == FixedArray::kHeaderSize);
8605     DCHECK_EQ(Context::SlotOffset(0) + kHeapObjectTag,
8606               FixedArray::OffsetOfElementAt(0));
8607     if (is_load) {
8608       Node* result = LoadFixedArrayElement(the_context, mapped_index);
8609       CSA_ASSERT(this, WordNotEqual(result, TheHoleConstant()));
8610       var_result.Bind(result);
8611     } else {
8612       StoreFixedArrayElement(the_context, mapped_index, value);
8613     }
8614     Goto(&end);
8615   }
8616 
8617   BIND(&if_unmapped);
8618   {
8619     Node* backing_store = LoadFixedArrayElement(elements, 1);
8620     GotoIf(WordNotEqual(LoadMap(backing_store), FixedArrayMapConstant()),
8621            bailout);
8622 
8623     Node* backing_store_length =
8624         LoadAndUntagFixedArrayBaseLength(backing_store);
8625     GotoIf(UintPtrGreaterThanOrEqual(key, backing_store_length), bailout);
8626 
8627     // The key falls into unmapped range.
8628     if (is_load) {
8629       Node* result = LoadFixedArrayElement(backing_store, key);
8630       GotoIf(WordEqual(result, TheHoleConstant()), bailout);
8631       var_result.Bind(result);
8632     } else {
8633       StoreFixedArrayElement(backing_store, key, value);
8634     }
8635     Goto(&end);
8636   }
8637 
8638   BIND(&end);
8639   return var_result.value();
8640 }
8641 
8642 TNode<Context> CodeStubAssembler::LoadScriptContext(
8643     TNode<Context> context, TNode<IntPtrT> context_index) {
8644   TNode<Context> native_context = LoadNativeContext(context);
8645   TNode<ScriptContextTable> script_context_table = CAST(
8646       LoadContextElement(native_context, Context::SCRIPT_CONTEXT_TABLE_INDEX));
8647 
8648   TNode<Context> script_context = CAST(LoadFixedArrayElement(
8649       script_context_table, context_index,
8650       ScriptContextTable::kFirstContextSlotIndex * kPointerSize));
8651   return script_context;
8652 }
8653 
8654 namespace {
8655 
8656 // Converts a typed array elements kind to a machine representation.
8657 MachineRepresentation ElementsKindToMachineRepresentation(ElementsKind kind) {
8658   switch (kind) {
8659     case UINT8_CLAMPED_ELEMENTS:
8660     case UINT8_ELEMENTS:
8661     case INT8_ELEMENTS:
8662       return MachineRepresentation::kWord8;
8663     case UINT16_ELEMENTS:
8664     case INT16_ELEMENTS:
8665       return MachineRepresentation::kWord16;
8666     case UINT32_ELEMENTS:
8667     case INT32_ELEMENTS:
8668       return MachineRepresentation::kWord32;
8669     case FLOAT32_ELEMENTS:
8670       return MachineRepresentation::kFloat32;
8671     case FLOAT64_ELEMENTS:
8672       return MachineRepresentation::kFloat64;
8673     default:
8674       UNREACHABLE();
8675   }
8676 }
8677 
8678 }  // namespace
8679 
8680 void CodeStubAssembler::StoreElement(Node* elements, ElementsKind kind,
8681                                      Node* index, Node* value,
8682                                      ParameterMode mode) {
8683   if (IsFixedTypedArrayElementsKind(kind)) {
8684     if (kind == UINT8_CLAMPED_ELEMENTS) {
8685       CSA_ASSERT(this,
8686                  Word32Equal(value, Word32And(Int32Constant(0xFF), value)));
8687     }
8688     Node* offset = ElementOffsetFromIndex(index, kind, mode, 0);
8689     // TODO(cbruni): Add OOB check once typed.
8690     MachineRepresentation rep = ElementsKindToMachineRepresentation(kind);
8691     StoreNoWriteBarrier(rep, elements, offset, value);
8692     return;
8693   } else if (IsDoubleElementsKind(kind)) {
8694     // Make sure we do not store signalling NaNs into double arrays.
8695     value = Float64SilenceNaN(value);
8696     StoreFixedDoubleArrayElement(elements, index, value, mode);
8697   } else {
8698     WriteBarrierMode barrier_mode =
8699         IsSmiElementsKind(kind) ? SKIP_WRITE_BARRIER : UPDATE_WRITE_BARRIER;
8700     StoreFixedArrayElement(elements, index, value, barrier_mode, 0, mode);
8701   }
8702 }
8703 
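// The two helpers below implement the conversion used for UINT8_CLAMPED
// stores: Int32ToUint8Clamped clamps an int32 into [0, 255] (e.g. -5 -> 0,
// 300 -> 255, 42 -> 42), while Float64ToUint8Clamped also rounds ties to even
// before truncating (e.g. 0.5 -> 0, 1.5 -> 2), matching the ToUint8Clamp
// rounding used by Uint8ClampedArray.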
8704 Node* CodeStubAssembler::Int32ToUint8Clamped(Node* int32_value) {
8705   Label done(this);
8706   Node* int32_zero = Int32Constant(0);
8707   Node* int32_255 = Int32Constant(255);
8708   VARIABLE(var_value, MachineRepresentation::kWord32, int32_value);
8709   GotoIf(Uint32LessThanOrEqual(int32_value, int32_255), &done);
8710   var_value.Bind(int32_zero);
8711   GotoIf(Int32LessThan(int32_value, int32_zero), &done);
8712   var_value.Bind(int32_255);
8713   Goto(&done);
8714   BIND(&done);
8715   return var_value.value();
8716 }
8717 
8718 Node* CodeStubAssembler::Float64ToUint8Clamped(Node* float64_value) {
8719   Label done(this);
8720   VARIABLE(var_value, MachineRepresentation::kWord32, Int32Constant(0));
8721   GotoIf(Float64LessThanOrEqual(float64_value, Float64Constant(0.0)), &done);
8722   var_value.Bind(Int32Constant(255));
8723   GotoIf(Float64LessThanOrEqual(Float64Constant(255.0), float64_value), &done);
8724   {
8725     Node* rounded_value = Float64RoundToEven(float64_value);
8726     var_value.Bind(TruncateFloat64ToWord32(rounded_value));
8727     Goto(&done);
8728   }
8729   BIND(&done);
8730   return var_value.value();
8731 }
8732 
8733 Node* CodeStubAssembler::PrepareValueForWriteToTypedArray(
8734     TNode<Object> input, ElementsKind elements_kind, TNode<Context> context) {
8735   DCHECK(IsFixedTypedArrayElementsKind(elements_kind));
8736 
8737   MachineRepresentation rep;
8738   switch (elements_kind) {
8739     case UINT8_ELEMENTS:
8740     case INT8_ELEMENTS:
8741     case UINT16_ELEMENTS:
8742     case INT16_ELEMENTS:
8743     case UINT32_ELEMENTS:
8744     case INT32_ELEMENTS:
8745     case UINT8_CLAMPED_ELEMENTS:
8746       rep = MachineRepresentation::kWord32;
8747       break;
8748     case FLOAT32_ELEMENTS:
8749       rep = MachineRepresentation::kFloat32;
8750       break;
8751     case FLOAT64_ELEMENTS:
8752       rep = MachineRepresentation::kFloat64;
8753       break;
8754     default:
8755       UNREACHABLE();
8756   }
8757 
8758   VARIABLE(var_result, rep);
8759   VARIABLE(var_input, MachineRepresentation::kTagged, input);
8760   Label done(this, &var_result), if_smi(this), if_heapnumber_or_oddball(this),
8761       convert(this), loop(this, &var_input);
8762   Goto(&loop);
8763   BIND(&loop);
8764   GotoIf(TaggedIsSmi(var_input.value()), &if_smi);
8765   // We can handle both HeapNumber and Oddball here, since Oddball has the
8766   // same layout as the HeapNumber for the HeapNumber::value field. This
8767   // way we can also properly optimize stores of oddballs to typed arrays.
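  // E.g. storing `true` into an Int32Array takes the heapnumber/oddball path
  // below: it reads 1.0 from the aliased value field and truncates it to the
  // int32 value 1.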
8768   GotoIf(IsHeapNumber(var_input.value()), &if_heapnumber_or_oddball);
8769   STATIC_ASSERT(HeapNumber::kValueOffset == Oddball::kToNumberRawOffset);
8770   Branch(HasInstanceType(var_input.value(), ODDBALL_TYPE),
8771          &if_heapnumber_or_oddball, &convert);
8772 
8773   BIND(&if_heapnumber_or_oddball);
8774   {
8775     Node* value = UncheckedCast<Float64T>(LoadObjectField(
8776         var_input.value(), HeapNumber::kValueOffset, MachineType::Float64()));
8777     if (rep == MachineRepresentation::kWord32) {
8778       if (elements_kind == UINT8_CLAMPED_ELEMENTS) {
8779         value = Float64ToUint8Clamped(value);
8780       } else {
8781         value = TruncateFloat64ToWord32(value);
8782       }
8783     } else if (rep == MachineRepresentation::kFloat32) {
8784       value = TruncateFloat64ToFloat32(value);
8785     } else {
8786       DCHECK_EQ(MachineRepresentation::kFloat64, rep);
8787     }
8788     var_result.Bind(value);
8789     Goto(&done);
8790   }
8791 
8792   BIND(&if_smi);
8793   {
8794     Node* value = SmiToInt32(var_input.value());
8795     if (rep == MachineRepresentation::kFloat32) {
8796       value = RoundInt32ToFloat32(value);
8797     } else if (rep == MachineRepresentation::kFloat64) {
8798       value = ChangeInt32ToFloat64(value);
8799     } else {
8800       DCHECK_EQ(MachineRepresentation::kWord32, rep);
8801       if (elements_kind == UINT8_CLAMPED_ELEMENTS) {
8802         value = Int32ToUint8Clamped(value);
8803       }
8804     }
8805     var_result.Bind(value);
8806     Goto(&done);
8807   }
8808 
8809   BIND(&convert);
8810   {
8811     var_input.Bind(CallBuiltin(Builtins::kNonNumberToNumber, context, input));
8812     Goto(&loop);
8813   }
8814 
8815   BIND(&done);
8816   return var_result.value();
8817 }
8818 
8819 void CodeStubAssembler::EmitBigTypedArrayElementStore(
8820     TNode<JSTypedArray> object, TNode<FixedTypedArrayBase> elements,
8821     TNode<IntPtrT> intptr_key, TNode<Object> value, TNode<Context> context,
8822     Label* opt_if_neutered) {
8823   TNode<BigInt> bigint_value = ToBigInt(context, value);
8824 
8825   if (opt_if_neutered != nullptr) {
8826     // Check if buffer has been neutered. Must happen after {ToBigInt}!
8827     Node* buffer = LoadObjectField(object, JSArrayBufferView::kBufferOffset);
8828     GotoIf(IsDetachedBuffer(buffer), opt_if_neutered);
8829   }
8830 
8831   TNode<RawPtrT> backing_store = LoadFixedTypedArrayBackingStore(elements);
8832   TNode<IntPtrT> offset = ElementOffsetFromIndex(intptr_key, BIGINT64_ELEMENTS,
8833                                                  INTPTR_PARAMETERS, 0);
8834   EmitBigTypedArrayElementStore(elements, backing_store, offset, bigint_value);
8835 }
8836 
8837 void CodeStubAssembler::EmitBigTypedArrayElementStore(
8838     TNode<FixedTypedArrayBase> elements, TNode<RawPtrT> backing_store,
8839     TNode<IntPtrT> offset, TNode<BigInt> bigint_value) {
8840   TNode<WordT> bitfield = LoadBigIntBitfield(bigint_value);
8841   TNode<UintPtrT> length = DecodeWord<BigIntBase::LengthBits>(bitfield);
8842   TNode<UintPtrT> sign = DecodeWord<BigIntBase::SignBits>(bitfield);
8843   TVARIABLE(UintPtrT, var_low, Unsigned(IntPtrConstant(0)));
8844   // Only used on 32-bit platforms.
8845   TVARIABLE(UintPtrT, var_high, Unsigned(IntPtrConstant(0)));
8846   Label do_store(this);
8847   GotoIf(WordEqual(length, IntPtrConstant(0)), &do_store);
8848   var_low = LoadBigIntDigit(bigint_value, 0);
8849   if (!Is64()) {
8850     Label load_done(this);
8851     GotoIf(WordEqual(length, IntPtrConstant(1)), &load_done);
8852     var_high = LoadBigIntDigit(bigint_value, 1);
8853     Goto(&load_done);
8854     BIND(&load_done);
8855   }
8856   GotoIf(WordEqual(sign, IntPtrConstant(0)), &do_store);
8857   // Negative value. Simulate two's complement.
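  // E.g. for BigInt(-1) on a 32-bit target, var_low starts as 1 and var_high
  // as 0; the negation below produces var_high == 0xFFFFFFFF (a borrow is
  // taken because var_low != 0) and var_low == 0xFFFFFFFF, i.e. the 64-bit
  // two's complement pattern 0xFFFFFFFFFFFFFFFF.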
8858   if (!Is64()) {
8859     var_high = Unsigned(IntPtrSub(IntPtrConstant(0), var_high.value()));
8860     Label no_carry(this);
8861     GotoIf(WordEqual(var_low.value(), IntPtrConstant(0)), &no_carry);
8862     var_high = Unsigned(IntPtrSub(var_high.value(), IntPtrConstant(1)));
8863     Goto(&no_carry);
8864     BIND(&no_carry);
8865   }
8866   var_low = Unsigned(IntPtrSub(IntPtrConstant(0), var_low.value()));
8867   Goto(&do_store);
8868   BIND(&do_store);
8869 
8870   // Assert that offset < elements.length. Given that it's an offset for a raw
8871   // pointer we correct it by the usual kHeapObjectTag offset.
8872   CSA_ASSERT(
8873       this, IsOffsetInBounds(offset, LoadAndUntagFixedArrayBaseLength(elements),
8874                              kHeapObjectTag, BIGINT64_ELEMENTS));
8875 
8876   MachineRepresentation rep = WordT::kMachineRepresentation;
8877 #if defined(V8_TARGET_BIG_ENDIAN)
8878   if (!Is64()) {
8879     StoreNoWriteBarrier(rep, backing_store, offset, var_high.value());
8880     StoreNoWriteBarrier(rep, backing_store,
8881                         IntPtrAdd(offset, IntPtrConstant(kPointerSize)),
8882                         var_low.value());
8883   } else {
8884     StoreNoWriteBarrier(rep, backing_store, offset, var_low.value());
8885   }
8886 #else
8887   StoreNoWriteBarrier(rep, backing_store, offset, var_low.value());
8888   if (!Is64()) {
8889     StoreNoWriteBarrier(rep, backing_store,
8890                         IntPtrAdd(offset, IntPtrConstant(kPointerSize)),
8891                         var_high.value());
8892   }
8893 #endif
8894 }
8895 
8896 void CodeStubAssembler::EmitElementStore(Node* object, Node* key, Node* value,
8897                                          bool is_jsarray,
8898                                          ElementsKind elements_kind,
8899                                          KeyedAccessStoreMode store_mode,
8900                                          Label* bailout, Node* context) {
8901   CSA_ASSERT(this, Word32BinaryNot(IsJSProxy(object)));
8902 
8903   Node* elements = LoadElements(object);
8904   if (!IsSmiOrObjectElementsKind(elements_kind)) {
8905     CSA_ASSERT(this, Word32BinaryNot(IsFixedCOWArrayMap(LoadMap(elements))));
8906   } else if (!IsCOWHandlingStoreMode(store_mode)) {
8907     GotoIf(IsFixedCOWArrayMap(LoadMap(elements)), bailout);
8908   }
8909 
8910   // TODO(ishell): introduce TryToIntPtrOrSmi() and use OptimalParameterMode().
8911   ParameterMode parameter_mode = INTPTR_PARAMETERS;
8912   TNode<IntPtrT> intptr_key = TryToIntptr(key, bailout);
8913 
8914   if (IsFixedTypedArrayElementsKind(elements_kind)) {
8915     Label done(this);
8916     // Bounds check.
8917     Node* length = TaggedToParameter(
8918         CAST(LoadObjectField(object, JSTypedArray::kLengthOffset)),
8919         parameter_mode);
8920 
8921     if (store_mode == STORE_NO_TRANSITION_IGNORE_OUT_OF_BOUNDS) {
8922       // Skip the store if we write beyond the length or
8923       // to a property with a negative integer index.
8924       GotoIfNot(UintPtrLessThan(intptr_key, length), &done);
8925     } else {
8926       DCHECK_EQ(STANDARD_STORE, store_mode);
8927       GotoIfNot(UintPtrLessThan(intptr_key, length), bailout);
8928     }
8929 
8930     TNode<Object> value_obj = UncheckedCast<Object>(value);
8931     if (elements_kind == BIGINT64_ELEMENTS ||
8932         elements_kind == BIGUINT64_ELEMENTS) {
8933       EmitBigTypedArrayElementStore(CAST(object), CAST(elements), intptr_key,
8934                                     value_obj, CAST(context), bailout);
8935     } else {
8936       value = PrepareValueForWriteToTypedArray(value_obj, elements_kind,
8937                                                CAST(context));
8938 
8939       // There must be no allocations between the buffer load and
8940       // the actual store to the backing store, because GC may decide that
8941       // the buffer is not alive or move the elements.
8942       // TODO(ishell): introduce DisallowHeapAllocationCode scope here.
8943 
8944       // Check if buffer has been neutered.
8945       Node* buffer = LoadObjectField(object, JSArrayBufferView::kBufferOffset);
8946       GotoIf(IsDetachedBuffer(buffer), bailout);
8947 
8948       Node* backing_store = LoadFixedTypedArrayBackingStore(CAST(elements));
8949       StoreElement(backing_store, elements_kind, intptr_key, value,
8950                    parameter_mode);
8951     }
8952     Goto(&done);
8953 
8954     BIND(&done);
8955     return;
8956   }
8957   DCHECK(IsSmiOrObjectElementsKind(elements_kind) ||
8958          IsDoubleElementsKind(elements_kind));
8959 
8960   Node* length = is_jsarray ? LoadJSArrayLength(object)
8961                             : LoadFixedArrayBaseLength(elements);
8962   length = TaggedToParameter(length, parameter_mode);
8963 
8964   // In case value is stored into a fast smi array, ensure that the value is
8965   // a smi before manipulating the backing store. Otherwise the backing store
8966   // may be left in an invalid state.
8967   if (IsSmiElementsKind(elements_kind)) {
8968     GotoIfNot(TaggedIsSmi(value), bailout);
8969   } else if (IsDoubleElementsKind(elements_kind)) {
8970     value = TryTaggedToFloat64(value, bailout);
8971   }
8972 
8973   if (IsGrowStoreMode(store_mode)) {
8974     elements = CheckForCapacityGrow(object, elements, elements_kind, store_mode,
8975                                     length, intptr_key, parameter_mode,
8976                                     is_jsarray, bailout);
8977   } else {
8978     GotoIfNot(UintPtrLessThan(intptr_key, length), bailout);
8979   }
8980 
8981   // If we didn't grow {elements}, it might still be COW, in which case we
8982   // copy it now.
8983   if (!IsSmiOrObjectElementsKind(elements_kind)) {
8984     CSA_ASSERT(this, Word32BinaryNot(IsFixedCOWArrayMap(LoadMap(elements))));
8985   } else if (IsCOWHandlingStoreMode(store_mode)) {
8986     elements = CopyElementsOnWrite(object, elements, elements_kind, length,
8987                                    parameter_mode, bailout);
8988   }
8989 
8990   CSA_ASSERT(this, Word32BinaryNot(IsFixedCOWArrayMap(LoadMap(elements))));
8991   StoreElement(elements, elements_kind, intptr_key, value, parameter_mode);
8992 }
8993 
8994 Node* CodeStubAssembler::CheckForCapacityGrow(
8995     Node* object, Node* elements, ElementsKind kind,
8996     KeyedAccessStoreMode store_mode, Node* length, Node* key,
8997     ParameterMode mode, bool is_js_array, Label* bailout) {
8998   VARIABLE(checked_elements, MachineRepresentation::kTagged);
8999   Label grow_case(this), no_grow_case(this), done(this),
9000       grow_bailout(this, Label::kDeferred);
9001 
9002   Node* condition;
9003   if (IsHoleyOrDictionaryElementsKind(kind)) {
9004     condition = UintPtrGreaterThanOrEqual(key, length);
9005   } else {
9006     // We don't support growing here unless the value is being appended.
9007     condition = WordEqual(key, length);
9008   }
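  // For a packed array of length 3 this means only a store to index 3 can
  // take the grow path; a store to index 5 would have to create holes and
  // instead reaches the bailout via the no-grow case below.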
9009   Branch(condition, &grow_case, &no_grow_case);
9010 
9011   BIND(&grow_case);
9012   {
9013     Node* current_capacity =
9014         TaggedToParameter(LoadFixedArrayBaseLength(elements), mode);
9015     checked_elements.Bind(elements);
9016     Label fits_capacity(this);
9017     // If key is negative, we will notice in Runtime::kGrowArrayElements.
9018     GotoIf(UintPtrLessThan(key, current_capacity), &fits_capacity);
9019 
9020     {
9021       Node* new_elements = TryGrowElementsCapacity(
9022           object, elements, kind, key, current_capacity, mode, &grow_bailout);
9023       checked_elements.Bind(new_elements);
9024       Goto(&fits_capacity);
9025     }
9026 
9027     BIND(&grow_bailout);
9028     {
9029       Node* tagged_key = mode == SMI_PARAMETERS
9030                              ? key
9031                              : ChangeInt32ToTagged(TruncateIntPtrToInt32(key));
9032       Node* maybe_elements = CallRuntime(
9033           Runtime::kGrowArrayElements, NoContextConstant(), object, tagged_key);
9034       GotoIf(TaggedIsSmi(maybe_elements), bailout);
9035       CSA_ASSERT(this, IsFixedArrayWithKind(maybe_elements, kind));
9036       checked_elements.Bind(maybe_elements);
9037       Goto(&fits_capacity);
9038     }
9039 
9040     BIND(&fits_capacity);
9041     if (is_js_array) {
9042       Node* new_length = IntPtrAdd(key, IntPtrOrSmiConstant(1, mode));
9043       StoreObjectFieldNoWriteBarrier(object, JSArray::kLengthOffset,
9044                                      ParameterToTagged(new_length, mode));
9045     }
9046     Goto(&done);
9047   }
9048 
9049   BIND(&no_grow_case);
9050   {
9051     GotoIfNot(UintPtrLessThan(key, length), bailout);
9052     checked_elements.Bind(elements);
9053     Goto(&done);
9054   }
9055 
9056   BIND(&done);
9057   return checked_elements.value();
9058 }
9059 
9060 Node* CodeStubAssembler::CopyElementsOnWrite(Node* object, Node* elements,
9061                                              ElementsKind kind, Node* length,
9062                                              ParameterMode mode,
9063                                              Label* bailout) {
9064   VARIABLE(new_elements_var, MachineRepresentation::kTagged, elements);
9065   Label done(this);
9066 
9067   GotoIfNot(IsFixedCOWArrayMap(LoadMap(elements)), &done);
9068   {
9069     Node* capacity =
9070         TaggedToParameter(LoadFixedArrayBaseLength(elements), mode);
9071     Node* new_elements = GrowElementsCapacity(object, elements, kind, kind,
9072                                               length, capacity, mode, bailout);
9073     new_elements_var.Bind(new_elements);
9074     Goto(&done);
9075   }
9076 
9077   BIND(&done);
9078   return new_elements_var.value();
9079 }
9080 
9081 void CodeStubAssembler::TransitionElementsKind(Node* object, Node* map,
9082                                                ElementsKind from_kind,
9083                                                ElementsKind to_kind,
9084                                                bool is_jsarray,
9085                                                Label* bailout) {
9086   DCHECK(!IsHoleyElementsKind(from_kind) || IsHoleyElementsKind(to_kind));
9087   if (AllocationSite::ShouldTrack(from_kind, to_kind)) {
9088     TrapAllocationMemento(object, bailout);
9089   }
9090 
9091   if (!IsSimpleMapChangeTransition(from_kind, to_kind)) {
9092     Comment("Non-simple map transition");
9093     Node* elements = LoadElements(object);
9094 
9095     Label done(this);
9096     GotoIf(WordEqual(elements, EmptyFixedArrayConstant()), &done);
9097 
9098     // TODO(ishell): Use OptimalParameterMode().
9099     ParameterMode mode = INTPTR_PARAMETERS;
9100     Node* elements_length = SmiUntag(LoadFixedArrayBaseLength(elements));
9101     Node* array_length =
9102         is_jsarray ? SmiUntag(LoadFastJSArrayLength(object)) : elements_length;
9103 
9104     CSA_ASSERT(this, WordNotEqual(elements_length, IntPtrConstant(0)));
9105 
9106     GrowElementsCapacity(object, elements, from_kind, to_kind, array_length,
9107                          elements_length, mode, bailout);
9108     Goto(&done);
9109     BIND(&done);
9110   }
9111 
9112   StoreMap(object, map);
9113 }
9114 
9115 void CodeStubAssembler::TrapAllocationMemento(Node* object,
9116                                               Label* memento_found) {
9117   Comment("[ TrapAllocationMemento");
9118   Label no_memento_found(this);
9119   Label top_check(this), map_check(this);
9120 
9121   Node* new_space_top_address = ExternalConstant(
9122       ExternalReference::new_space_allocation_top_address(isolate()));
9123   const int kMementoMapOffset = JSArray::kSize;
9124   const int kMementoLastWordOffset =
9125       kMementoMapOffset + AllocationMemento::kSize - kPointerSize;
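  // These offsets assume that a potential AllocationMemento immediately
  // follows the JSArray in memory: its map word sits at object + JSArray::kSize
  // and its last word at object + JSArray::kSize + AllocationMemento::kSize -
  // kPointerSize, which is what the page and top checks below reason about.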
9126 
9127   // Bail out if the object is not in new space.
9128   Node* object_word = BitcastTaggedToWord(object);
9129   Node* object_page = PageFromAddress(object_word);
9130   {
9131     Node* page_flags = Load(MachineType::IntPtr(), object_page,
9132                             IntPtrConstant(Page::kFlagsOffset));
9133     GotoIf(WordEqual(WordAnd(page_flags,
9134                              IntPtrConstant(MemoryChunk::kIsInNewSpaceMask)),
9135                      IntPtrConstant(0)),
9136            &no_memento_found);
9137   }
9138 
9139   Node* memento_last_word = IntPtrAdd(
9140       object_word, IntPtrConstant(kMementoLastWordOffset - kHeapObjectTag));
9141   Node* memento_last_word_page = PageFromAddress(memento_last_word);
9142 
9143   Node* new_space_top = Load(MachineType::Pointer(), new_space_top_address);
9144   Node* new_space_top_page = PageFromAddress(new_space_top);
9145 
9146   // If the object is in new space, we need to check whether the respective
9147   // potential memento object is on the same page as the current top.
9148   GotoIf(WordEqual(memento_last_word_page, new_space_top_page), &top_check);
9149 
9150   // The object is on a different page than allocation top. Bail out if the
9151   // object sits on the page boundary as no memento can follow and we cannot
9152   // touch the memory following it.
9153   Branch(WordEqual(object_page, memento_last_word_page), &map_check,
9154          &no_memento_found);
9155 
9156   // If top is on the same page as the current object, we need to check whether
9157   // we are below top.
9158   BIND(&top_check);
9159   {
9160     Branch(UintPtrGreaterThanOrEqual(memento_last_word, new_space_top),
9161            &no_memento_found, &map_check);
9162   }
9163 
9164   // Memento map check.
9165   BIND(&map_check);
9166   {
9167     Node* memento_map = LoadObjectField(object, kMementoMapOffset);
9168     Branch(
9169         WordEqual(memento_map, LoadRoot(Heap::kAllocationMementoMapRootIndex)),
9170         memento_found, &no_memento_found);
9171   }
9172   BIND(&no_memento_found);
9173   Comment("] TrapAllocationMemento");
9174 }
9175 
9176 Node* CodeStubAssembler::PageFromAddress(Node* address) {
9177   return WordAnd(address, IntPtrConstant(~Page::kPageAlignmentMask));
9178 }
9179 
9180 TNode<AllocationSite> CodeStubAssembler::CreateAllocationSiteInFeedbackVector(
9181     Node* feedback_vector, Node* slot) {
9182   Node* size = IntPtrConstant(AllocationSite::kSize);
9183   Node* site = Allocate(size, CodeStubAssembler::kPretenured);
9184   StoreMapNoWriteBarrier(site, Heap::kAllocationSiteMapRootIndex);
9185   // Should match AllocationSite::Initialize.
9186   Node* field = UpdateWord<AllocationSite::ElementsKindBits>(
9187       IntPtrConstant(0), IntPtrConstant(GetInitialFastElementsKind()));
9188   StoreObjectFieldNoWriteBarrier(
9189       site, AllocationSite::kTransitionInfoOrBoilerplateOffset, SmiTag(field));
9190 
9191   // Unlike literals, constructed arrays don't have nested sites
9192   Node* zero = SmiConstant(0);
9193   StoreObjectFieldNoWriteBarrier(site, AllocationSite::kNestedSiteOffset, zero);
9194 
9195   // Pretenuring calculation field.
9196   StoreObjectFieldNoWriteBarrier(site, AllocationSite::kPretenureDataOffset,
9197                                  zero);
9198 
9199   // Pretenuring memento creation count field.
9200   StoreObjectFieldNoWriteBarrier(
9201       site, AllocationSite::kPretenureCreateCountOffset, zero);
9202 
9203   // Store an empty fixed array for the code dependency.
9204   StoreObjectFieldRoot(site, AllocationSite::kDependentCodeOffset,
9205                        Heap::kEmptyFixedArrayRootIndex);
9206 
9207   // Link the object to the allocation site list
9208   Node* site_list = ExternalConstant(
9209       ExternalReference::allocation_sites_list_address(isolate()));
9210   Node* next_site = LoadBufferObject(site_list, 0);
9211 
9212   // TODO(mvstanton): This is a store to a weak pointer, which we may want to
9213   // mark as such in order to skip the write barrier, once we have a unified
9214   // system for weakness. For now we decided to keep it like this because having
9215   // an initial write barrier backed store makes this pointer strong until the
9216   // next GC, and allocation sites are designed to survive several GCs anyway.
9217   StoreObjectField(site, AllocationSite::kWeakNextOffset, next_site);
9218   StoreNoWriteBarrier(MachineRepresentation::kTagged, site_list, site);
9219 
9220   StoreFeedbackVectorSlot(feedback_vector, slot, site, UPDATE_WRITE_BARRIER, 0,
9221                           SMI_PARAMETERS);
9222   return CAST(site);
9223 }
9224 
9225 Node* CodeStubAssembler::CreateWeakCellInFeedbackVector(Node* feedback_vector,
9226                                                         Node* slot,
9227                                                         Node* value) {
9228   Node* size = IntPtrConstant(WeakCell::kSize);
9229   Node* cell = Allocate(size, CodeStubAssembler::kPretenured);
9230 
9231   // Initialize the WeakCell.
9232   DCHECK(Heap::RootIsImmortalImmovable(Heap::kWeakCellMapRootIndex));
9233   StoreMapNoWriteBarrier(cell, Heap::kWeakCellMapRootIndex);
9234   StoreObjectField(cell, WeakCell::kValueOffset, value);
9235 
9236   // Store the WeakCell in the feedback vector.
9237   StoreFeedbackVectorSlot(feedback_vector, slot, cell);
9238   return cell;
9239 }
9240 
9241 Node* CodeStubAssembler::BuildFastLoop(
9242     const CodeStubAssembler::VariableList& vars, Node* start_index,
9243     Node* end_index, const FastLoopBody& body, int increment,
9244     ParameterMode parameter_mode, IndexAdvanceMode advance_mode) {
9245   CSA_SLOW_ASSERT(this, MatchesParameterMode(start_index, parameter_mode));
9246   CSA_SLOW_ASSERT(this, MatchesParameterMode(end_index, parameter_mode));
9247   MachineRepresentation index_rep = (parameter_mode == INTPTR_PARAMETERS)
9248                                         ? MachineType::PointerRepresentation()
9249                                         : MachineRepresentation::kTaggedSigned;
9250   VARIABLE(var, index_rep, start_index);
9251   VariableList vars_copy(vars.begin(), vars.end(), zone());
9252   vars_copy.push_back(&var);
9253   Label loop(this, vars_copy);
9254   Label after_loop(this);
9255   // Introduce an explicit second check of the termination condition before the
9256   // loop that helps turbofan generate better code. If there's only a single
9257   // check, then the CodeStubAssembler forces it to be at the beginning of the
9258   // loop requiring a backwards branch at the end of the loop (it's not possible
9259   // to force the loop header check at the end of the loop and branch forward to
9260   // it from the pre-header). The extra branch is slower in the case that the
9261   // loop actually iterates.
9262   Branch(WordEqual(var.value(), end_index), &after_loop, &loop);
9263   BIND(&loop);
9264   {
9265     if (advance_mode == IndexAdvanceMode::kPre) {
9266       Increment(&var, increment, parameter_mode);
9267     }
9268     body(var.value());
9269     if (advance_mode == IndexAdvanceMode::kPost) {
9270       Increment(&var, increment, parameter_mode);
9271     }
9272     Branch(WordNotEqual(var.value(), end_index), &loop, &after_loop);
9273   }
9274   BIND(&after_loop);
9275   return var.value();
9276 }
9277 
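// A typical use of BuildFastLoop iterates raw offsets over tagged slots, e.g.
// (sketch only; |object|, |value| and the offsets stand for caller-provided
// nodes):
//
//   BuildFastLoop(start_offset, end_offset,
//                 [=](Node* offset) {
//                   StoreNoWriteBarrier(MachineRepresentation::kTagged,
//                                       object, offset, value);
//                 },
//                 kPointerSize, INTPTR_PARAMETERS, IndexAdvanceMode::kPost);
//
// The duplicated termination check described above means an empty range
// branches straight to the exit without entering the loop body.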
9278 void CodeStubAssembler::BuildFastFixedArrayForEach(
9279     const CodeStubAssembler::VariableList& vars, Node* fixed_array,
9280     ElementsKind kind, Node* first_element_inclusive,
9281     Node* last_element_exclusive, const FastFixedArrayForEachBody& body,
9282     ParameterMode mode, ForEachDirection direction) {
9283   STATIC_ASSERT(FixedArray::kHeaderSize == FixedDoubleArray::kHeaderSize);
9284   CSA_SLOW_ASSERT(this, MatchesParameterMode(first_element_inclusive, mode));
9285   CSA_SLOW_ASSERT(this, MatchesParameterMode(last_element_exclusive, mode));
9286   CSA_SLOW_ASSERT(this, Word32Or(IsFixedArrayWithKind(fixed_array, kind),
9287                                  IsPropertyArray(fixed_array)));
9288   int32_t first_val;
9289   bool constant_first = ToInt32Constant(first_element_inclusive, first_val);
9290   int32_t last_val;
9291   bool constant_last = ToInt32Constant(last_element_exclusive, last_val);
9292   if (constant_first && constant_last) {
9293     int delta = last_val - first_val;
9294     DCHECK_GE(delta, 0);
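    // Small constant ranges are unrolled below into one body(...) invocation
    // per element, e.g. a constant range [0, 4) becomes four straight-line
    // element accesses instead of a loop.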
9295     if (delta <= kElementLoopUnrollThreshold) {
9296       if (direction == ForEachDirection::kForward) {
9297         for (int i = first_val; i < last_val; ++i) {
9298           Node* index = IntPtrConstant(i);
9299           Node* offset =
9300               ElementOffsetFromIndex(index, kind, INTPTR_PARAMETERS,
9301                                      FixedArray::kHeaderSize - kHeapObjectTag);
9302           body(fixed_array, offset);
9303         }
9304       } else {
9305         for (int i = last_val - 1; i >= first_val; --i) {
9306           Node* index = IntPtrConstant(i);
9307           Node* offset =
9308               ElementOffsetFromIndex(index, kind, INTPTR_PARAMETERS,
9309                                      FixedArray::kHeaderSize - kHeapObjectTag);
9310           body(fixed_array, offset);
9311         }
9312       }
9313       return;
9314     }
9315   }
9316 
9317   Node* start =
9318       ElementOffsetFromIndex(first_element_inclusive, kind, mode,
9319                              FixedArray::kHeaderSize - kHeapObjectTag);
9320   Node* limit =
9321       ElementOffsetFromIndex(last_element_exclusive, kind, mode,
9322                              FixedArray::kHeaderSize - kHeapObjectTag);
9323   if (direction == ForEachDirection::kReverse) std::swap(start, limit);
9324 
9325   int increment = IsDoubleElementsKind(kind) ? kDoubleSize : kPointerSize;
9326   BuildFastLoop(
9327       vars, start, limit,
9328       [fixed_array, &body](Node* offset) { body(fixed_array, offset); },
9329       direction == ForEachDirection::kReverse ? -increment : increment,
9330       INTPTR_PARAMETERS,
9331       direction == ForEachDirection::kReverse ? IndexAdvanceMode::kPre
9332                                               : IndexAdvanceMode::kPost);
9333 }
9334 
9335 void CodeStubAssembler::GotoIfFixedArraySizeDoesntFitInNewSpace(
9336     Node* element_count, Label* doesnt_fit, int base_size, ParameterMode mode) {
9337   GotoIf(FixedArraySizeDoesntFitInNewSpace(element_count, base_size, mode),
9338          doesnt_fit);
9339 }
9340 
9341 void CodeStubAssembler::InitializeFieldsWithRoot(
9342     Node* object, Node* start_offset, Node* end_offset,
9343     Heap::RootListIndex root_index) {
9344   CSA_SLOW_ASSERT(this, TaggedIsNotSmi(object));
9345   start_offset = IntPtrAdd(start_offset, IntPtrConstant(-kHeapObjectTag));
9346   end_offset = IntPtrAdd(end_offset, IntPtrConstant(-kHeapObjectTag));
9347   Node* root_value = LoadRoot(root_index);
9348   BuildFastLoop(end_offset, start_offset,
9349                 [this, object, root_value](Node* current) {
9350                   StoreNoWriteBarrier(MachineRepresentation::kTagged, object,
9351                                       current, root_value);
9352                 },
9353                 -kPointerSize, INTPTR_PARAMETERS,
9354                 CodeStubAssembler::IndexAdvanceMode::kPre);
9355 }
9356 
9357 void CodeStubAssembler::BranchIfNumberRelationalComparison(
9358     Operation op, Node* left, Node* right, Label* if_true, Label* if_false) {
9359   CSA_SLOW_ASSERT(this, IsNumber(left));
9360   CSA_SLOW_ASSERT(this, IsNumber(right));
9361 
9362   Label do_float_comparison(this);
9363   TVARIABLE(Float64T, var_left_float);
9364   TVARIABLE(Float64T, var_right_float);
9365 
9366   Label if_left_smi(this), if_left_not_smi(this);
9367   Branch(TaggedIsSmi(left), &if_left_smi, &if_left_not_smi);
9368 
9369   BIND(&if_left_smi);
9370   {
9371     TNode<Smi> smi_left = CAST(left);
9372 
9373     Label if_right_not_smi(this);
9374     GotoIfNot(TaggedIsSmi(right), &if_right_not_smi);
9375     {
9376       TNode<Smi> smi_right = CAST(right);
9377 
9378       // Both {left} and {right} are Smi, so just perform a fast Smi comparison.
9379       switch (op) {
9380         case Operation::kLessThan:
9381           BranchIfSmiLessThan(smi_left, smi_right, if_true, if_false);
9382           break;
9383         case Operation::kLessThanOrEqual:
9384           BranchIfSmiLessThanOrEqual(smi_left, smi_right, if_true, if_false);
9385           break;
9386         case Operation::kGreaterThan:
9387           BranchIfSmiLessThan(smi_right, smi_left, if_true, if_false);
9388           break;
9389         case Operation::kGreaterThanOrEqual:
9390           BranchIfSmiLessThanOrEqual(smi_right, smi_left, if_true, if_false);
9391           break;
9392         default:
9393           UNREACHABLE();
9394       }
9395     }
9396     BIND(&if_right_not_smi);
9397     {
9398       CSA_ASSERT(this, IsHeapNumber(right));
9399       var_left_float = SmiToFloat64(smi_left);
9400       var_right_float = LoadHeapNumberValue(right);
9401       Goto(&do_float_comparison);
9402     }
9403   }
9404 
9405   BIND(&if_left_not_smi);
9406   {
9407     CSA_ASSERT(this, IsHeapNumber(left));
9408     var_left_float = LoadHeapNumberValue(left);
9409 
9410     Label if_right_not_smi(this);
9411     GotoIfNot(TaggedIsSmi(right), &if_right_not_smi);
9412     var_right_float = SmiToFloat64(right);
9413     Goto(&do_float_comparison);
9414 
9415     BIND(&if_right_not_smi);
9416     {
9417       CSA_ASSERT(this, IsHeapNumber(right));
9418       var_right_float = LoadHeapNumberValue(right);
9419       Goto(&do_float_comparison);
9420     }
9421   }
9422 
9423   BIND(&do_float_comparison);
9424   {
9425     switch (op) {
9426       case Operation::kLessThan:
9427         Branch(Float64LessThan(var_left_float.value(), var_right_float.value()),
9428                if_true, if_false);
9429         break;
9430       case Operation::kLessThanOrEqual:
9431         Branch(Float64LessThanOrEqual(var_left_float.value(),
9432                                       var_right_float.value()),
9433                if_true, if_false);
9434         break;
9435       case Operation::kGreaterThan:
9436         Branch(
9437             Float64GreaterThan(var_left_float.value(), var_right_float.value()),
9438             if_true, if_false);
9439         break;
9440       case Operation::kGreaterThanOrEqual:
9441         Branch(Float64GreaterThanOrEqual(var_left_float.value(),
9442                                          var_right_float.value()),
9443                if_true, if_false);
9444         break;
9445       default:
9446         UNREACHABLE();
9447     }
9448   }
9449 }
9450 
9451 void CodeStubAssembler::GotoIfNumberGreaterThanOrEqual(Node* left, Node* right,
9452                                                        Label* if_true) {
9453   Label if_false(this);
9454   BranchIfNumberRelationalComparison(Operation::kGreaterThanOrEqual, left,
9455                                      right, if_true, &if_false);
9456   BIND(&if_false);
9457 }
9458 
9459 namespace {
9460 Operation Reverse(Operation op) {
9461   switch (op) {
9462     case Operation::kLessThan:
9463       return Operation::kGreaterThan;
9464     case Operation::kLessThanOrEqual:
9465       return Operation::kGreaterThanOrEqual;
9466     case Operation::kGreaterThan:
9467       return Operation::kLessThan;
9468     case Operation::kGreaterThanOrEqual:
9469       return Operation::kLessThanOrEqual;
9470     default:
9471       break;
9472   }
9473   UNREACHABLE();
9474 }
9475 }  // anonymous namespace
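// Reverse() is used below to flip the operator when operands are swapped so
// that the BigInt comparison runtime functions always receive the BigInt as
// their first argument; e.g. a Smi < BigInt comparison is dispatched as
// Runtime::kBigIntCompareToNumber(kGreaterThan, bigint, smi).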
9476 
9477 Node* CodeStubAssembler::RelationalComparison(Operation op, Node* left,
9478                                               Node* right, Node* context,
9479                                               Variable* var_type_feedback) {
9480   Label return_true(this), return_false(this), do_float_comparison(this),
9481       end(this);
9482   TVARIABLE(Oddball, var_result);  // Actually only "true" or "false".
9483   TVARIABLE(Float64T, var_left_float);
9484   TVARIABLE(Float64T, var_right_float);
9485 
9486   // We might need to loop several times due to ToPrimitive and/or ToNumeric
9487   // conversions.
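  // For instance, comparing a Smi against an object with a valueOf() method
  // takes one extra trip through the loop: the receiver is converted to a
  // Numeric first, and the resulting primitive is then compared on the fast
  // path; the type feedback for such a case is forced to kAny.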
9488   VARIABLE(var_left, MachineRepresentation::kTagged, left);
9489   VARIABLE(var_right, MachineRepresentation::kTagged, right);
9490   VariableList loop_variable_list({&var_left, &var_right}, zone());
9491   if (var_type_feedback != nullptr) {
9492     // Initialize the type feedback to None. The current feedback is combined
9493     // with the previous feedback.
9494     var_type_feedback->Bind(SmiConstant(CompareOperationFeedback::kNone));
9495     loop_variable_list.push_back(var_type_feedback);
9496   }
9497   Label loop(this, loop_variable_list);
9498   Goto(&loop);
9499   BIND(&loop);
9500   {
9501     left = var_left.value();
9502     right = var_right.value();
9503 
9504     Label if_left_smi(this), if_left_not_smi(this);
9505     Branch(TaggedIsSmi(left), &if_left_smi, &if_left_not_smi);
9506 
9507     BIND(&if_left_smi);
9508     {
9509       TNode<Smi> smi_left = CAST(left);
9510       Label if_right_smi(this), if_right_heapnumber(this),
9511           if_right_bigint(this, Label::kDeferred),
9512           if_right_not_numeric(this, Label::kDeferred);
9513       GotoIf(TaggedIsSmi(right), &if_right_smi);
9514       Node* right_map = LoadMap(right);
9515       GotoIf(IsHeapNumberMap(right_map), &if_right_heapnumber);
9516       Node* right_instance_type = LoadMapInstanceType(right_map);
9517       Branch(IsBigIntInstanceType(right_instance_type), &if_right_bigint,
9518              &if_right_not_numeric);
9519 
9520       BIND(&if_right_smi);
9521       {
9522         TNode<Smi> smi_right = CAST(right);
9523         CombineFeedback(var_type_feedback,
9524                         CompareOperationFeedback::kSignedSmall);
9525         switch (op) {
9526           case Operation::kLessThan:
9527             BranchIfSmiLessThan(smi_left, smi_right, &return_true,
9528                                 &return_false);
9529             break;
9530           case Operation::kLessThanOrEqual:
9531             BranchIfSmiLessThanOrEqual(smi_left, smi_right, &return_true,
9532                                        &return_false);
9533             break;
9534           case Operation::kGreaterThan:
9535             BranchIfSmiLessThan(smi_right, smi_left, &return_true,
9536                                 &return_false);
9537             break;
9538           case Operation::kGreaterThanOrEqual:
9539             BranchIfSmiLessThanOrEqual(smi_right, smi_left, &return_true,
9540                                        &return_false);
9541             break;
9542           default:
9543             UNREACHABLE();
9544         }
9545       }
9546 
9547       BIND(&if_right_heapnumber);
9548       {
9549         CombineFeedback(var_type_feedback, CompareOperationFeedback::kNumber);
9550         var_left_float = SmiToFloat64(smi_left);
9551         var_right_float = LoadHeapNumberValue(right);
9552         Goto(&do_float_comparison);
9553       }
9554 
9555       BIND(&if_right_bigint);
9556       {
9557         OverwriteFeedback(var_type_feedback, CompareOperationFeedback::kAny);
9558         var_result = CAST(CallRuntime(Runtime::kBigIntCompareToNumber,
9559                                       NoContextConstant(),
9560                                       SmiConstant(Reverse(op)), right, left));
9561         Goto(&end);
9562       }
9563 
9564       BIND(&if_right_not_numeric);
9565       {
9566         OverwriteFeedback(var_type_feedback, CompareOperationFeedback::kAny);
9567         // Convert {right} to a Numeric; we don't need to perform the
9568         // dedicated ToPrimitive(right, hint Number) operation, as the
9569         // ToNumeric(right) will by itself already invoke ToPrimitive with
9570         // a Number hint.
9571         var_right.Bind(
9572             CallBuiltin(Builtins::kNonNumberToNumeric, context, right));
9573         Goto(&loop);
9574       }
9575     }
9576 
9577     BIND(&if_left_not_smi);
9578     {
9579       Node* left_map = LoadMap(left);
9580 
9581       Label if_right_smi(this), if_right_not_smi(this);
9582       Branch(TaggedIsSmi(right), &if_right_smi, &if_right_not_smi);
9583 
9584       BIND(&if_right_smi);
9585       {
9586         Label if_left_heapnumber(this), if_left_bigint(this, Label::kDeferred),
9587             if_left_not_numeric(this, Label::kDeferred);
9588         GotoIf(IsHeapNumberMap(left_map), &if_left_heapnumber);
9589         Node* left_instance_type = LoadMapInstanceType(left_map);
9590         Branch(IsBigIntInstanceType(left_instance_type), &if_left_bigint,
9591                &if_left_not_numeric);
9592 
9593         BIND(&if_left_heapnumber);
9594         {
9595           CombineFeedback(var_type_feedback, CompareOperationFeedback::kNumber);
9596           var_left_float = LoadHeapNumberValue(left);
9597           var_right_float = SmiToFloat64(right);
9598           Goto(&do_float_comparison);
9599         }
9600 
9601         BIND(&if_left_bigint);
9602         {
9603           OverwriteFeedback(var_type_feedback, CompareOperationFeedback::kAny);
9604           var_result = CAST(CallRuntime(Runtime::kBigIntCompareToNumber,
9605                                         NoContextConstant(), SmiConstant(op),
9606                                         left, right));
9607           Goto(&end);
9608         }
9609 
9610         BIND(&if_left_not_numeric);
9611         {
9612           OverwriteFeedback(var_type_feedback, CompareOperationFeedback::kAny);
9613           // Convert {left} to a Numeric; we don't need to perform the
9614           // dedicated ToPrimitive(left, hint Number) operation, as the
9615           // ToNumeric(left) will by itself already invoke ToPrimitive with
9616           // a Number hint.
9617           var_left.Bind(
9618               CallBuiltin(Builtins::kNonNumberToNumeric, context, left));
9619           Goto(&loop);
9620         }
9621       }
9622 
9623       BIND(&if_right_not_smi);
9624       {
9625         Node* right_map = LoadMap(right);
9626 
9627         Label if_left_heapnumber(this), if_left_bigint(this, Label::kDeferred),
9628             if_left_string(this), if_left_other(this, Label::kDeferred);
9629         GotoIf(IsHeapNumberMap(left_map), &if_left_heapnumber);
9630         Node* left_instance_type = LoadMapInstanceType(left_map);
9631         GotoIf(IsBigIntInstanceType(left_instance_type), &if_left_bigint);
9632         Branch(IsStringInstanceType(left_instance_type), &if_left_string,
9633                &if_left_other);
9634 
9635         BIND(&if_left_heapnumber);
9636         {
9637           Label if_right_heapnumber(this),
9638               if_right_bigint(this, Label::kDeferred),
9639               if_right_not_numeric(this, Label::kDeferred);
9640           GotoIf(WordEqual(right_map, left_map), &if_right_heapnumber);
9641           Node* right_instance_type = LoadMapInstanceType(right_map);
9642           Branch(IsBigIntInstanceType(right_instance_type), &if_right_bigint,
9643                  &if_right_not_numeric);
9644 
9645           BIND(&if_right_heapnumber);
9646           {
9647             CombineFeedback(var_type_feedback,
9648                             CompareOperationFeedback::kNumber);
9649             var_left_float = LoadHeapNumberValue(left);
9650             var_right_float = LoadHeapNumberValue(right);
9651             Goto(&do_float_comparison);
9652           }
9653 
9654           BIND(&if_right_bigint);
9655           {
9656             OverwriteFeedback(var_type_feedback,
9657                               CompareOperationFeedback::kAny);
9658             var_result = CAST(CallRuntime(
9659                 Runtime::kBigIntCompareToNumber, NoContextConstant(),
9660                 SmiConstant(Reverse(op)), right, left));
9661             Goto(&end);
9662           }
9663 
9664           BIND(&if_right_not_numeric);
9665           {
9666             OverwriteFeedback(var_type_feedback,
9667                               CompareOperationFeedback::kAny);
9668             // Convert {right} to a Numeric; we don't need to perform
9669             // dedicated ToPrimitive(right, hint Number) operation, as the
9670             // ToNumeric(right) will by itself already invoke ToPrimitive with
9671             // a Number hint.
9672             var_right.Bind(
9673                 CallBuiltin(Builtins::kNonNumberToNumeric, context, right));
9674             Goto(&loop);
9675           }
9676         }
9677 
9678         BIND(&if_left_bigint);
9679         {
9680           Label if_right_heapnumber(this), if_right_bigint(this),
9681               if_right_string(this), if_right_other(this);
9682           GotoIf(IsHeapNumberMap(right_map), &if_right_heapnumber);
9683           Node* right_instance_type = LoadMapInstanceType(right_map);
9684           GotoIf(IsBigIntInstanceType(right_instance_type), &if_right_bigint);
9685           Branch(IsStringInstanceType(right_instance_type), &if_right_string,
9686                  &if_right_other);
9687 
9688           BIND(&if_right_heapnumber);
9689           {
9690             OverwriteFeedback(var_type_feedback,
9691                               CompareOperationFeedback::kAny);
9692             var_result = CAST(CallRuntime(Runtime::kBigIntCompareToNumber,
9693                                           NoContextConstant(), SmiConstant(op),
9694                                           left, right));
9695             Goto(&end);
9696           }
9697 
9698           BIND(&if_right_bigint);
9699           {
9700             CombineFeedback(var_type_feedback,
9701                             CompareOperationFeedback::kBigInt);
9702             var_result = CAST(CallRuntime(Runtime::kBigIntCompareToBigInt,
9703                                           NoContextConstant(), SmiConstant(op),
9704                                           left, right));
9705             Goto(&end);
9706           }
9707 
9708           BIND(&if_right_string);
9709           {
9710             OverwriteFeedback(var_type_feedback,
9711                               CompareOperationFeedback::kAny);
9712             var_result = CAST(CallRuntime(Runtime::kBigIntCompareToString,
9713                                           NoContextConstant(), SmiConstant(op),
9714                                           left, right));
9715             Goto(&end);
9716           }
9717 
9718           // {right} is not a Number, BigInt, or String.
9719           BIND(&if_right_other);
9720           {
9721             OverwriteFeedback(var_type_feedback,
9722                               CompareOperationFeedback::kAny);
9723             // Convert {right} to a Numeric; we don't need to perform
9724             // dedicated ToPrimitive(right, hint Number) operation, as the
9725             // ToNumeric(right) will by itself already invoke ToPrimitive with
9726             // a Number hint.
9727             var_right.Bind(
9728                 CallBuiltin(Builtins::kNonNumberToNumeric, context, right));
9729             Goto(&loop);
9730           }
9731         }
9732 
9733         BIND(&if_left_string);
9734         {
9735           Node* right_instance_type = LoadMapInstanceType(right_map);
9736 
9737           Label if_right_not_string(this, Label::kDeferred);
9738           GotoIfNot(IsStringInstanceType(right_instance_type),
9739                     &if_right_not_string);
9740 
9741           // Both {left} and {right} are strings.
9742           CombineFeedback(var_type_feedback, CompareOperationFeedback::kString);
9743           Builtins::Name builtin;
9744           switch (op) {
9745             case Operation::kLessThan:
9746               builtin = Builtins::kStringLessThan;
9747               break;
9748             case Operation::kLessThanOrEqual:
9749               builtin = Builtins::kStringLessThanOrEqual;
9750               break;
9751             case Operation::kGreaterThan:
9752               builtin = Builtins::kStringGreaterThan;
9753               break;
9754             case Operation::kGreaterThanOrEqual:
9755               builtin = Builtins::kStringGreaterThanOrEqual;
9756               break;
9757             default:
9758               UNREACHABLE();
9759           }
9760           var_result = CAST(CallBuiltin(builtin, context, left, right));
9761           Goto(&end);
9762 
9763           BIND(&if_right_not_string);
9764           {
9765             OverwriteFeedback(var_type_feedback,
9766                               CompareOperationFeedback::kAny);
9767             // {left} is a String, while {right} isn't. Check if {right} is
9768             // a BigInt; otherwise call ToPrimitive(right, hint Number) if
9769             // {right} is a receiver, or ToNumeric(left) and then
9770             // ToNumeric(right) in all other cases.
9771             STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
9772             Label if_right_bigint(this),
9773                 if_right_receiver(this, Label::kDeferred);
9774             GotoIf(IsBigIntInstanceType(right_instance_type), &if_right_bigint);
9775             GotoIf(IsJSReceiverInstanceType(right_instance_type),
9776                    &if_right_receiver);
9777 
9778             var_left.Bind(
9779                 CallBuiltin(Builtins::kNonNumberToNumeric, context, left));
9780             var_right.Bind(CallBuiltin(Builtins::kToNumeric, context, right));
9781             Goto(&loop);
9782 
9783             BIND(&if_right_bigint);
9784             {
9785               var_result = CAST(CallRuntime(
9786                   Runtime::kBigIntCompareToString, NoContextConstant(),
9787                   SmiConstant(Reverse(op)), right, left));
9788               Goto(&end);
9789             }
9790 
9791             BIND(&if_right_receiver);
9792             {
9793               Callable callable = CodeFactory::NonPrimitiveToPrimitive(
9794                   isolate(), ToPrimitiveHint::kNumber);
9795               var_right.Bind(CallStub(callable, context, right));
9796               Goto(&loop);
9797             }
9798           }
9799         }
9800 
9801         BIND(&if_left_other);
9802         {
9803           // {left} is neither a Numeric nor a String, and {right} is not a Smi.
9804           if (var_type_feedback != nullptr) {
9805             // Collect NumberOrOddball feedback if {left} is an Oddball
9806             // and {right} is either a HeapNumber or Oddball. Otherwise collect
9807             // Any feedback.
9808             Label collect_any_feedback(this), collect_oddball_feedback(this),
9809                 collect_feedback_done(this);
9810             GotoIfNot(InstanceTypeEqual(left_instance_type, ODDBALL_TYPE),
9811                       &collect_any_feedback);
9812 
9813             GotoIf(IsHeapNumberMap(right_map), &collect_oddball_feedback);
9814             Node* right_instance_type = LoadMapInstanceType(right_map);
9815             Branch(InstanceTypeEqual(right_instance_type, ODDBALL_TYPE),
9816                    &collect_oddball_feedback, &collect_any_feedback);
9817 
9818             BIND(&collect_oddball_feedback);
9819             {
9820               CombineFeedback(var_type_feedback,
9821                               CompareOperationFeedback::kNumberOrOddball);
9822               Goto(&collect_feedback_done);
9823             }
9824 
9825             BIND(&collect_any_feedback);
9826             {
9827               OverwriteFeedback(var_type_feedback,
9828                                 CompareOperationFeedback::kAny);
9829               Goto(&collect_feedback_done);
9830             }
9831 
9832             BIND(&collect_feedback_done);
9833           }
9834 
9835           // If {left} is a receiver, call ToPrimitive(left, hint Number).
9836           // Otherwise call ToNumeric(left) and then ToNumeric(right).
9837           STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
9838           Label if_left_receiver(this, Label::kDeferred);
9839           GotoIf(IsJSReceiverInstanceType(left_instance_type),
9840                  &if_left_receiver);
9841 
9842           var_left.Bind(
9843               CallBuiltin(Builtins::kNonNumberToNumeric, context, left));
9844           var_right.Bind(CallBuiltin(Builtins::kToNumeric, context, right));
9845           Goto(&loop);
9846 
9847           BIND(&if_left_receiver);
9848           {
9849             Callable callable = CodeFactory::NonPrimitiveToPrimitive(
9850                 isolate(), ToPrimitiveHint::kNumber);
9851             var_left.Bind(CallStub(callable, context, left));
9852             Goto(&loop);
9853           }
9854         }
9855       }
9856     }
9857   }
9858 
9859   BIND(&do_float_comparison);
9860   {
9861     switch (op) {
9862       case Operation::kLessThan:
9863         Branch(Float64LessThan(var_left_float.value(), var_right_float.value()),
9864                &return_true, &return_false);
9865         break;
9866       case Operation::kLessThanOrEqual:
9867         Branch(Float64LessThanOrEqual(var_left_float.value(),
9868                                       var_right_float.value()),
9869                &return_true, &return_false);
9870         break;
9871       case Operation::kGreaterThan:
9872         Branch(
9873             Float64GreaterThan(var_left_float.value(), var_right_float.value()),
9874             &return_true, &return_false);
9875         break;
9876       case Operation::kGreaterThanOrEqual:
9877         Branch(Float64GreaterThanOrEqual(var_left_float.value(),
9878                                          var_right_float.value()),
9879                &return_true, &return_false);
9880         break;
9881       default:
9882         UNREACHABLE();
9883     }
9884   }
9885 
9886   BIND(&return_true);
9887   {
9888     var_result = TrueConstant();
9889     Goto(&end);
9890   }
9891 
9892   BIND(&return_false);
9893   {
9894     var_result = FalseConstant();
9895     Goto(&end);
9896   }
9897 
9898   BIND(&end);
9899   return var_result.value();
9900 }
9901 
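// Chooses the compare-operation feedback to record for a String operand:
// kInternalizedString if the instance type carries the internalized tag,
// kString otherwise.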
9902 TNode<Smi> CodeStubAssembler::CollectFeedbackForString(
9903     SloppyTNode<Int32T> instance_type) {
9904   TNode<Smi> feedback = SelectSmiConstant(
9905       Word32Equal(
9906           Word32And(instance_type, Int32Constant(kIsNotInternalizedMask)),
9907           Int32Constant(kInternalizedTag)),
9908       CompareOperationFeedback::kInternalizedString,
9909       CompareOperationFeedback::kString);
9910   return feedback;
9911 }
9912 
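// Handles the case where both operands are the very same reference: every
// value is then equal to itself except NaN, so HeapNumbers still need a NaN
// check, while for all other inputs only type feedback is collected.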
9913 void CodeStubAssembler::GenerateEqual_Same(Node* value, Label* if_equal,
9914                                            Label* if_notequal,
9915                                            Variable* var_type_feedback) {
9916   // For both abstract and strict equality checks, we need an additional check
9917   // for NaN values, because NaN is not considered equal to NaN even if both
9918   // the left- and right-hand sides reference exactly the same value.
9919 
9920   Label if_smi(this), if_heapnumber(this);
9921   GotoIf(TaggedIsSmi(value), &if_smi);
9922 
9923   Node* value_map = LoadMap(value);
9924   GotoIf(IsHeapNumberMap(value_map), &if_heapnumber);
9925 
9926   // For non-HeapNumbers, all we do is collect type feedback.
9927   if (var_type_feedback != nullptr) {
9928     Node* instance_type = LoadMapInstanceType(value_map);
9929 
9930     Label if_string(this), if_receiver(this), if_symbol(this), if_bigint(this),
9931         if_other(this, Label::kDeferred);
9932     GotoIf(IsStringInstanceType(instance_type), &if_string);
9933     GotoIf(IsJSReceiverInstanceType(instance_type), &if_receiver);
9934     GotoIf(IsBigIntInstanceType(instance_type), &if_bigint);
9935     Branch(IsSymbolInstanceType(instance_type), &if_symbol, &if_other);
9936 
9937     BIND(&if_string);
9938     {
9939       CombineFeedback(var_type_feedback,
9940                       CollectFeedbackForString(instance_type));
9941       Goto(if_equal);
9942     }
9943 
9944     BIND(&if_symbol);
9945     {
9946       CombineFeedback(var_type_feedback, CompareOperationFeedback::kSymbol);
9947       Goto(if_equal);
9948     }
9949 
9950     BIND(&if_receiver);
9951     {
9952       CombineFeedback(var_type_feedback, CompareOperationFeedback::kReceiver);
9953       Goto(if_equal);
9954     }
9955 
9956     BIND(&if_bigint);
9957     {
9958       CombineFeedback(var_type_feedback, CompareOperationFeedback::kBigInt);
9959       Goto(if_equal);
9960     }
9961 
9962     BIND(&if_other);
9963     {
9964       CombineFeedback(var_type_feedback, CompareOperationFeedback::kAny);
9965       Goto(if_equal);
9966     }
9967   } else {
9968     Goto(if_equal);
9969   }
9970 
9971   BIND(&if_heapnumber);
9972   {
9973     CombineFeedback(var_type_feedback, CompareOperationFeedback::kNumber);
9974     Node* number_value = LoadHeapNumberValue(value);
9975     BranchIfFloat64IsNaN(number_value, if_notequal, if_equal);
9976   }
9977 
9978   BIND(&if_smi);
9979   {
9980     CombineFeedback(var_type_feedback, CompareOperationFeedback::kSignedSmall);
9981     Goto(if_equal);
9982   }
9983 }
9984 
9985 // ES6 section 7.2.12 Abstract Equality Comparison
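// For example (JS semantics): 1 == "1" and null == undefined are true,
// whereas NaN == NaN is false.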
9986 Node* CodeStubAssembler::Equal(Node* left, Node* right, Node* context,
9987                                Variable* var_type_feedback) {
9988   // This is a slightly optimized version of Object::Equals. Whenever you
9989   // change something functionality-wise in here, remember to update the
9990   // Object::Equals method as well.
9991 
9992   Label if_equal(this), if_notequal(this), do_float_comparison(this),
9993       do_right_stringtonumber(this, Label::kDeferred), end(this);
9994   VARIABLE(result, MachineRepresentation::kTagged);
9995   TVARIABLE(Float64T, var_left_float);
9996   TVARIABLE(Float64T, var_right_float);
9997 
9998   // We can avoid code duplication by exploiting the fact that abstract equality
9999   // is symmetric.
10000   Label use_symmetry(this);
10001 
10002   // We might need to loop several times due to ToPrimitive and/or ToNumber
10003   // conversions.
10004   VARIABLE(var_left, MachineRepresentation::kTagged, left);
10005   VARIABLE(var_right, MachineRepresentation::kTagged, right);
10006   VariableList loop_variable_list({&var_left, &var_right}, zone());
10007   if (var_type_feedback != nullptr) {
10008     // Initialize the type feedback to None. The current feedback will be
10009     // combined with the previous feedback.
10010     OverwriteFeedback(var_type_feedback, CompareOperationFeedback::kNone);
10011     loop_variable_list.push_back(var_type_feedback);
10012   }
10013   Label loop(this, loop_variable_list);
10014   Goto(&loop);
10015   BIND(&loop);
10016   {
10017     left = var_left.value();
10018     right = var_right.value();
10019 
10020     Label if_notsame(this);
10021     GotoIf(WordNotEqual(left, right), &if_notsame);
10022     {
10023       // {left} and {right} reference the exact same value, yet we need special
10024       // treatment for HeapNumber, as NaN is not equal to NaN.
10025       GenerateEqual_Same(left, &if_equal, &if_notequal, var_type_feedback);
10026     }
10027 
10028     BIND(&if_notsame);
10029     Label if_left_smi(this), if_left_not_smi(this);
10030     Branch(TaggedIsSmi(left), &if_left_smi, &if_left_not_smi);
10031 
10032     BIND(&if_left_smi);
10033     {
10034       Label if_right_smi(this), if_right_not_smi(this);
10035       Branch(TaggedIsSmi(right), &if_right_smi, &if_right_not_smi);
10036 
10037       BIND(&if_right_smi);
10038       {
10039         // We have already checked for {left} and {right} being the same value,
10040         // so when we get here they must be different Smis.
10041         CombineFeedback(var_type_feedback,
10042                         CompareOperationFeedback::kSignedSmall);
10043         Goto(&if_notequal);
10044       }
10045 
10046       BIND(&if_right_not_smi);
10047       Node* right_map = LoadMap(right);
10048       Label if_right_heapnumber(this), if_right_boolean(this),
10049           if_right_bigint(this, Label::kDeferred),
10050           if_right_receiver(this, Label::kDeferred);
10051       GotoIf(IsHeapNumberMap(right_map), &if_right_heapnumber);
10052       // {left} is Smi and {right} is not HeapNumber or Smi.
10053       if (var_type_feedback != nullptr) {
10054         var_type_feedback->Bind(SmiConstant(CompareOperationFeedback::kAny));
10055       }
10056       GotoIf(IsBooleanMap(right_map), &if_right_boolean);
10057       Node* right_type = LoadMapInstanceType(right_map);
10058       GotoIf(IsStringInstanceType(right_type), &do_right_stringtonumber);
10059       GotoIf(IsBigIntInstanceType(right_type), &if_right_bigint);
10060       Branch(IsJSReceiverInstanceType(right_type), &if_right_receiver,
10061              &if_notequal);
10062 
10063       BIND(&if_right_heapnumber);
10064       {
10065         var_left_float = SmiToFloat64(left);
10066         var_right_float = LoadHeapNumberValue(right);
10067         CombineFeedback(var_type_feedback, CompareOperationFeedback::kNumber);
10068         Goto(&do_float_comparison);
10069       }
10070 
10071       BIND(&if_right_boolean);
10072       {
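        // Per the spec, a Boolean operand is compared via its number value,
        // e.g. 1 == true evaluates to true.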
10073         var_right.Bind(LoadObjectField(right, Oddball::kToNumberOffset));
10074         Goto(&loop);
10075       }
10076 
10077       BIND(&if_right_bigint);
10078       {
10079         result.Bind(CallRuntime(Runtime::kBigIntEqualToNumber,
10080                                 NoContextConstant(), right, left));
10081         Goto(&end);
10082       }
10083 
10084       BIND(&if_right_receiver);
10085       {
10086         Callable callable = CodeFactory::NonPrimitiveToPrimitive(isolate());
10087         var_right.Bind(CallStub(callable, context, right));
10088         Goto(&loop);
10089       }
10090     }
10091 
10092     BIND(&if_left_not_smi);
10093     {
10094       GotoIf(TaggedIsSmi(right), &use_symmetry);
10095 
10096       Label if_left_symbol(this), if_left_number(this), if_left_string(this),
10097           if_left_bigint(this, Label::kDeferred), if_left_oddball(this),
10098           if_left_receiver(this);
10099 
10100       Node* left_map = LoadMap(left);
10101       Node* right_map = LoadMap(right);
10102       Node* left_type = LoadMapInstanceType(left_map);
10103       Node* right_type = LoadMapInstanceType(right_map);
10104 
10105       GotoIf(Int32LessThan(left_type, Int32Constant(FIRST_NONSTRING_TYPE)),
10106              &if_left_string);
10107       GotoIf(InstanceTypeEqual(left_type, SYMBOL_TYPE), &if_left_symbol);
10108       GotoIf(InstanceTypeEqual(left_type, HEAP_NUMBER_TYPE), &if_left_number);
10109       GotoIf(InstanceTypeEqual(left_type, ODDBALL_TYPE), &if_left_oddball);
10110       GotoIf(InstanceTypeEqual(left_type, BIGINT_TYPE), &if_left_bigint);
10111       Goto(&if_left_receiver);
10112 
10113       BIND(&if_left_string);
10114       {
10115         GotoIfNot(IsStringInstanceType(right_type), &use_symmetry);
10116         result.Bind(CallBuiltin(Builtins::kStringEqual, context, left, right));
10117         CombineFeedback(var_type_feedback,
10118                         SmiOr(CollectFeedbackForString(left_type),
10119                               CollectFeedbackForString(right_type)));
10120         Goto(&end);
10121       }
10122 
10123       BIND(&if_left_number);
10124       {
10125         Label if_right_not_number(this);
10126         GotoIf(Word32NotEqual(left_type, right_type), &if_right_not_number);
10127 
10128         var_left_float = LoadHeapNumberValue(left);
10129         var_right_float = LoadHeapNumberValue(right);
10130         CombineFeedback(var_type_feedback, CompareOperationFeedback::kNumber);
10131         Goto(&do_float_comparison);
10132 
10133         BIND(&if_right_not_number);
10134         {
10135           Label if_right_boolean(this);
10136           if (var_type_feedback != nullptr) {
10137             var_type_feedback->Bind(
10138                 SmiConstant(CompareOperationFeedback::kAny));
10139           }
10140           GotoIf(IsStringInstanceType(right_type), &do_right_stringtonumber);
10141           GotoIf(IsBooleanMap(right_map), &if_right_boolean);
10142           GotoIf(IsBigIntInstanceType(right_type), &use_symmetry);
10143           Branch(IsJSReceiverInstanceType(right_type), &use_symmetry,
10144                  &if_notequal);
10145 
10146           BIND(&if_right_boolean);
10147           {
10148             var_right.Bind(LoadObjectField(right, Oddball::kToNumberOffset));
10149             Goto(&loop);
10150           }
10151         }
10152       }
10153 
10154       BIND(&if_left_bigint);
10155       {
10156         Label if_right_heapnumber(this), if_right_bigint(this),
10157             if_right_string(this), if_right_boolean(this);
10158         GotoIf(IsHeapNumberMap(right_map), &if_right_heapnumber);
10159         GotoIf(IsBigIntInstanceType(right_type), &if_right_bigint);
10160         GotoIf(IsStringInstanceType(right_type), &if_right_string);
10161         GotoIf(IsBooleanMap(right_map), &if_right_boolean);
10162         Branch(IsJSReceiverInstanceType(right_type), &use_symmetry,
10163                &if_notequal);
10164 
10165         BIND(&if_right_heapnumber);
10166         {
10167           if (var_type_feedback != nullptr) {
10168             var_type_feedback->Bind(
10169                 SmiConstant(CompareOperationFeedback::kAny));
10170           }
10171           result.Bind(CallRuntime(Runtime::kBigIntEqualToNumber,
10172                                   NoContextConstant(), left, right));
10173           Goto(&end);
10174         }
10175 
10176         BIND(&if_right_bigint);
10177         {
10178           CombineFeedback(var_type_feedback, CompareOperationFeedback::kBigInt);
10179           result.Bind(CallRuntime(Runtime::kBigIntEqualToBigInt,
10180                                   NoContextConstant(), left, right));
10181           Goto(&end);
10182         }
10183 
10184         BIND(&if_right_string);
10185         {
10186           if (var_type_feedback != nullptr) {
10187             var_type_feedback->Bind(
10188                 SmiConstant(CompareOperationFeedback::kAny));
10189           }
10190           result.Bind(CallRuntime(Runtime::kBigIntEqualToString,
10191                                   NoContextConstant(), left, right));
10192           Goto(&end);
10193         }
10194 
10195         BIND(&if_right_boolean);
10196         {
10197           if (var_type_feedback != nullptr) {
10198             var_type_feedback->Bind(
10199                 SmiConstant(CompareOperationFeedback::kAny));
10200           }
10201           var_right.Bind(LoadObjectField(right, Oddball::kToNumberOffset));
10202           Goto(&loop);
10203         }
10204       }
10205 
10206       BIND(&if_left_oddball);
10207       {
10208         if (var_type_feedback != nullptr) {
10209           var_type_feedback->Bind(SmiConstant(CompareOperationFeedback::kAny));
10210         }
10211 
10212         Label if_left_boolean(this);
10213         GotoIf(IsBooleanMap(left_map), &if_left_boolean);
10214         // {left} is either Null or Undefined. Check if {right} is
10215         // undetectable (which includes Null and Undefined).
10216         Branch(IsUndetectableMap(right_map), &if_equal, &if_notequal);
10217 
10218         BIND(&if_left_boolean);
10219         {
10220           // If {right} is a Boolean too, it must be a different Boolean.
10221           GotoIf(WordEqual(right_map, left_map), &if_notequal);
10222           // Otherwise, convert {left} to number and try again.
10223           var_left.Bind(LoadObjectField(left, Oddball::kToNumberOffset));
10224           Goto(&loop);
10225         }
10226       }
10227 
10228       BIND(&if_left_symbol);
10229       {
10230         Label if_right_receiver(this);
10231         GotoIf(IsJSReceiverInstanceType(right_type), &if_right_receiver);
10232         // {right} is not a JSReceiver and also not the same Symbol as {left},
10233         // so the result is "not equal".
10234         if (var_type_feedback != nullptr) {
10235           Label if_right_symbol(this);
10236           GotoIf(IsSymbolInstanceType(right_type), &if_right_symbol);
10237           var_type_feedback->Bind(SmiConstant(CompareOperationFeedback::kAny));
10238           Goto(&if_notequal);
10239 
10240           BIND(&if_right_symbol);
10241           {
10242             CombineFeedback(var_type_feedback,
10243                             CompareOperationFeedback::kSymbol);
10244             Goto(&if_notequal);
10245           }
10246         } else {
10247           Goto(&if_notequal);
10248         }
10249 
10250         BIND(&if_right_receiver);
10251         {
10252           // {left} is a Primitive and {right} is a JSReceiver, so swapping
10253           // the order is not observable.
10254           if (var_type_feedback != nullptr) {
10255             var_type_feedback->Bind(
10256                 SmiConstant(CompareOperationFeedback::kAny));
10257           }
10258           Goto(&use_symmetry);
10259         }
10260       }
10261 
10262       BIND(&if_left_receiver);
10263       {
10264         CSA_ASSERT(this, IsJSReceiverInstanceType(left_type));
10265         Label if_right_not_receiver(this);
10266         GotoIfNot(IsJSReceiverInstanceType(right_type), &if_right_not_receiver);
10267 
10268         // {left} and {right} are different JSReceiver references.
10269         CombineFeedback(var_type_feedback, CompareOperationFeedback::kReceiver);
10270         Goto(&if_notequal);
10271 
10272         BIND(&if_right_not_receiver);
10273         {
10274           if (var_type_feedback != nullptr) {
10275             var_type_feedback->Bind(
10276                 SmiConstant(CompareOperationFeedback::kAny));
10277           }
10278           Label if_right_null_or_undefined(this);
10279           GotoIf(IsUndetectableMap(right_map), &if_right_null_or_undefined);
10280 
10281           // {right} is a Primitive; convert {left} to Primitive too.
10282           Callable callable = CodeFactory::NonPrimitiveToPrimitive(isolate());
10283           var_left.Bind(CallStub(callable, context, left));
10284           Goto(&loop);
10285 
10286           BIND(&if_right_null_or_undefined);
10287           Branch(IsUndetectableMap(left_map), &if_equal, &if_notequal);
10288         }
10289       }
10290     }
10291 
10292     BIND(&do_right_stringtonumber);
10293     {
10294       var_right.Bind(CallBuiltin(Builtins::kStringToNumber, context, right));
10295       Goto(&loop);
10296     }
10297 
10298     BIND(&use_symmetry);
10299     {
10300       var_left.Bind(right);
10301       var_right.Bind(left);
10302       Goto(&loop);
10303     }
10304   }
10305 
10306   BIND(&do_float_comparison);
10307   {
10308     Branch(Float64Equal(var_left_float.value(), var_right_float.value()),
10309            &if_equal, &if_notequal);
10310   }
10311 
10312   BIND(&if_equal);
10313   {
10314     result.Bind(TrueConstant());
10315     Goto(&end);
10316   }
10317 
10318   BIND(&if_notequal);
10319   {
10320     result.Bind(FalseConstant());
10321     Goto(&end);
10322   }
10323 
10324   BIND(&end);
10325   return result.value();
10326 }
10327 
10328 Node* CodeStubAssembler::StrictEqual(Node* lhs, Node* rhs,
10329                                      Variable* var_type_feedback) {
10330   // Pseudo-code for the algorithm below:
10331   //
10332   // if (lhs == rhs) {
10333   //   if (lhs->IsHeapNumber()) return HeapNumber::cast(lhs)->value() != NaN;
10334   //   return true;
10335   // }
10336   // if (!lhs->IsSmi()) {
10337   //   if (lhs->IsHeapNumber()) {
10338   //     if (rhs->IsSmi()) {
10339   //       return Smi::ToInt(rhs) == HeapNumber::cast(lhs)->value();
10340   //     } else if (rhs->IsHeapNumber()) {
10341   //       return HeapNumber::cast(rhs)->value() ==
10342   //       HeapNumber::cast(lhs)->value();
10343   //     } else {
10344   //       return false;
10345   //     }
10346   //   } else {
10347   //     if (rhs->IsSmi()) {
10348   //       return false;
10349   //     } else {
10350   //       if (lhs->IsString()) {
10351   //         if (rhs->IsString()) {
10352   //           return %StringEqual(lhs, rhs);
10353   //         } else {
10354   //           return false;
10355   //         }
10356   //       } else if (lhs->IsBigInt()) {
10357   //         if (rhs->IsBigInt()) {
10358   //           return %BigIntEqualToBigInt(lhs, rhs);
10359   //         } else {
10360   //           return false;
10361   //         }
10362   //       } else {
10363   //         return false;
10364   //       }
10365   //     }
10366   //   }
10367   // } else {
10368   //   if (rhs->IsSmi()) {
10369   //     return false;
10370   //   } else {
10371   //     if (rhs->IsHeapNumber()) {
10372   //       return Smi::ToInt(lhs) == HeapNumber::cast(rhs)->value();
10373   //     } else {
10374   //       return false;
10375   //     }
10376   //   }
10377   // }
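  //
  // For example, 1 === 1.0 is true (a Smi compared to a HeapNumber holding
  // the same value), while 1 === "1" and NaN === NaN are both false.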
10378 
10379   Label if_equal(this), if_notequal(this), end(this);
10380   VARIABLE(result, MachineRepresentation::kTagged);
10381 
10382   // Check if {lhs} and {rhs} refer to the same object.
10383   Label if_same(this), if_notsame(this);
10384   Branch(WordEqual(lhs, rhs), &if_same, &if_notsame);
10385 
10386   BIND(&if_same);
10387   {
10388     // The {lhs} and {rhs} reference the exact same value, yet we need special
10389     // treatment for HeapNumber, as NaN is not equal to NaN.
10390     if (var_type_feedback != nullptr) {
10391       var_type_feedback->Bind(SmiConstant(CompareOperationFeedback::kNone));
10392     }
10393     GenerateEqual_Same(lhs, &if_equal, &if_notequal, var_type_feedback);
10394   }
10395 
10396   BIND(&if_notsame);
10397   {
10398     // The {lhs} and {rhs} reference different objects, yet for Smi, HeapNumber,
10399     // BigInt and String they can still be considered equal.
10400 
10401     if (var_type_feedback != nullptr) {
10402       var_type_feedback->Bind(SmiConstant(CompareOperationFeedback::kAny));
10403     }
10404 
10405     // Check if {lhs} is a Smi or a HeapObject.
10406     Label if_lhsissmi(this), if_lhsisnotsmi(this);
10407     Branch(TaggedIsSmi(lhs), &if_lhsissmi, &if_lhsisnotsmi);
10408 
10409     BIND(&if_lhsisnotsmi);
10410     {
10411       // Load the map of {lhs}.
10412       Node* lhs_map = LoadMap(lhs);
10413 
10414       // Check if {lhs} is a HeapNumber.
10415       Label if_lhsisnumber(this), if_lhsisnotnumber(this);
10416       Branch(IsHeapNumberMap(lhs_map), &if_lhsisnumber, &if_lhsisnotnumber);
10417 
10418       BIND(&if_lhsisnumber);
10419       {
10420         // Check if {rhs} is a Smi or a HeapObject.
10421         Label if_rhsissmi(this), if_rhsisnotsmi(this);
10422         Branch(TaggedIsSmi(rhs), &if_rhsissmi, &if_rhsisnotsmi);
10423 
10424         BIND(&if_rhsissmi);
10425         {
10426           // Convert {lhs} and {rhs} to floating point values.
10427           Node* lhs_value = LoadHeapNumberValue(lhs);
10428           Node* rhs_value = SmiToFloat64(rhs);
10429 
10430           if (var_type_feedback != nullptr) {
10431             var_type_feedback->Bind(
10432                 SmiConstant(CompareOperationFeedback::kNumber));
10433           }
10434 
10435           // Perform a floating point comparison of {lhs} and {rhs}.
10436           Branch(Float64Equal(lhs_value, rhs_value), &if_equal, &if_notequal);
10437         }
10438 
10439         BIND(&if_rhsisnotsmi);
10440         {
10441           // Load the map of {rhs}.
10442           Node* rhs_map = LoadMap(rhs);
10443 
10444           // Check if {rhs} is also a HeapNumber.
10445           Label if_rhsisnumber(this), if_rhsisnotnumber(this);
10446           Branch(IsHeapNumberMap(rhs_map), &if_rhsisnumber, &if_rhsisnotnumber);
10447 
10448           BIND(&if_rhsisnumber);
10449           {
10450             // Convert {lhs} and {rhs} to floating point values.
10451             Node* lhs_value = LoadHeapNumberValue(lhs);
10452             Node* rhs_value = LoadHeapNumberValue(rhs);
10453 
10454             if (var_type_feedback != nullptr) {
10455               var_type_feedback->Bind(
10456                   SmiConstant(CompareOperationFeedback::kNumber));
10457             }
10458 
10459             // Perform a floating point comparison of {lhs} and {rhs}.
10460             Branch(Float64Equal(lhs_value, rhs_value), &if_equal, &if_notequal);
10461           }
10462 
10463           BIND(&if_rhsisnotnumber);
10464           Goto(&if_notequal);
10465         }
10466       }
10467 
10468       BIND(&if_lhsisnotnumber);
10469       {
10470         // Check if {rhs} is a Smi or a HeapObject.
10471         Label if_rhsissmi(this), if_rhsisnotsmi(this);
10472         Branch(TaggedIsSmi(rhs), &if_rhsissmi, &if_rhsisnotsmi);
10473 
10474         BIND(&if_rhsissmi);
10475         Goto(&if_notequal);
10476 
10477         BIND(&if_rhsisnotsmi);
10478         {
10479           // Load the instance type of {lhs}.
10480           Node* lhs_instance_type = LoadMapInstanceType(lhs_map);
10481 
10482           // Check if {lhs} is a String.
10483           Label if_lhsisstring(this), if_lhsisnotstring(this);
10484           Branch(IsStringInstanceType(lhs_instance_type), &if_lhsisstring,
10485                  &if_lhsisnotstring);
10486 
10487           BIND(&if_lhsisstring);
10488           {
10489             // Load the instance type of {rhs}.
10490             Node* rhs_instance_type = LoadInstanceType(rhs);
10491 
10492             // Check if {rhs} is also a String.
10493             Label if_rhsisstring(this, Label::kDeferred),
10494                 if_rhsisnotstring(this);
10495             Branch(IsStringInstanceType(rhs_instance_type), &if_rhsisstring,
10496                    &if_rhsisnotstring);
10497 
10498             BIND(&if_rhsisstring);
10499             {
10500               if (var_type_feedback != nullptr) {
10501                 TNode<Smi> lhs_feedback =
10502                     CollectFeedbackForString(lhs_instance_type);
10503                 TNode<Smi> rhs_feedback =
10504                     CollectFeedbackForString(rhs_instance_type);
10505                 var_type_feedback->Bind(SmiOr(lhs_feedback, rhs_feedback));
10506               }
10507               result.Bind(CallBuiltin(Builtins::kStringEqual,
10508                                       NoContextConstant(), lhs, rhs));
10509               Goto(&end);
10510             }
10511 
10512             BIND(&if_rhsisnotstring);
10513             Goto(&if_notequal);
10514           }
10515 
10516           BIND(&if_lhsisnotstring);
10517 
10518           // Check if {lhs} is a BigInt.
10519           Label if_lhsisbigint(this), if_lhsisnotbigint(this);
10520           Branch(IsBigIntInstanceType(lhs_instance_type), &if_lhsisbigint,
10521                  &if_lhsisnotbigint);
10522 
10523           BIND(&if_lhsisbigint);
10524           {
10525             // Load the instance type of {rhs}.
10526             Node* rhs_instance_type = LoadInstanceType(rhs);
10527 
10528             // Check if {rhs} is also a BigInt.
10529             Label if_rhsisbigint(this, Label::kDeferred),
10530                 if_rhsisnotbigint(this);
10531             Branch(IsBigIntInstanceType(rhs_instance_type), &if_rhsisbigint,
10532                    &if_rhsisnotbigint);
10533 
10534             BIND(&if_rhsisbigint);
10535             {
10536               if (var_type_feedback != nullptr) {
10537                 var_type_feedback->Bind(
10538                     SmiConstant(CompareOperationFeedback::kBigInt));
10539               }
10540               result.Bind(CallRuntime(Runtime::kBigIntEqualToBigInt,
10541                                       NoContextConstant(), lhs, rhs));
10542               Goto(&end);
10543             }
10544 
10545             BIND(&if_rhsisnotbigint);
10546             Goto(&if_notequal);
10547           }
10548 
10549           BIND(&if_lhsisnotbigint);
10550           if (var_type_feedback != nullptr) {
10551             // Load the instance type of {rhs}.
10552             Node* rhs_instance_type = LoadInstanceType(rhs);
10553 
10554             Label if_lhsissymbol(this), if_lhsisreceiver(this);
10555             GotoIf(IsJSReceiverInstanceType(lhs_instance_type),
10556                    &if_lhsisreceiver);
10557             Branch(IsSymbolInstanceType(lhs_instance_type), &if_lhsissymbol,
10558                    &if_notequal);
10559 
10560             BIND(&if_lhsisreceiver);
10561             {
10562               GotoIfNot(IsJSReceiverInstanceType(rhs_instance_type),
10563                         &if_notequal);
10564               var_type_feedback->Bind(
10565                   SmiConstant(CompareOperationFeedback::kReceiver));
10566               Goto(&if_notequal);
10567             }
10568 
10569             BIND(&if_lhsissymbol);
10570             {
10571               GotoIfNot(IsSymbolInstanceType(rhs_instance_type), &if_notequal);
10572               var_type_feedback->Bind(
10573                   SmiConstant(CompareOperationFeedback::kSymbol));
10574               Goto(&if_notequal);
10575             }
10576           } else {
10577             Goto(&if_notequal);
10578           }
10579         }
10580       }
10581     }
10582 
10583     BIND(&if_lhsissmi);
10584     {
10585       // We already know that {lhs} and {rhs} are not reference equal, and {lhs}
10586       // is a Smi; so {lhs} and {rhs} can only be strictly equal if {rhs} is a
10587       // HeapNumber with an equal floating point value.
10588 
10589       // Check if {rhs} is a Smi or a HeapObject.
10590       Label if_rhsissmi(this), if_rhsisnotsmi(this);
10591       Branch(TaggedIsSmi(rhs), &if_rhsissmi, &if_rhsisnotsmi);
10592 
10593       BIND(&if_rhsissmi);
10594       if (var_type_feedback != nullptr) {
10595         var_type_feedback->Bind(
10596             SmiConstant(CompareOperationFeedback::kSignedSmall));
10597       }
10598       Goto(&if_notequal);
10599 
10600       BIND(&if_rhsisnotsmi);
10601       {
10602         // Load the map of the {rhs}.
10603         Node* rhs_map = LoadMap(rhs);
10604 
10605         // The {rhs} could be a HeapNumber with the same value as {lhs}.
10606         Label if_rhsisnumber(this), if_rhsisnotnumber(this);
10607         Branch(IsHeapNumberMap(rhs_map), &if_rhsisnumber, &if_rhsisnotnumber);
10608 
10609         BIND(&if_rhsisnumber);
10610         {
10611           // Convert {lhs} and {rhs} to floating point values.
10612           Node* lhs_value = SmiToFloat64(lhs);
10613           Node* rhs_value = LoadHeapNumberValue(rhs);
10614 
10615           if (var_type_feedback != nullptr) {
10616             var_type_feedback->Bind(
10617                 SmiConstant(CompareOperationFeedback::kNumber));
10618           }
10619 
10620           // Perform a floating point comparison of {lhs} and {rhs}.
10621           Branch(Float64Equal(lhs_value, rhs_value), &if_equal, &if_notequal);
10622         }
10623 
10624         BIND(&if_rhsisnotnumber);
10625         Goto(&if_notequal);
10626       }
10627     }
10628   }
10629 
10630   BIND(&if_equal);
10631   {
10632     result.Bind(TrueConstant());
10633     Goto(&end);
10634   }
10635 
10636   BIND(&if_notequal);
10637   {
10638     result.Bind(FalseConstant());
10639     Goto(&end);
10640   }
10641 
10642   BIND(&end);
10643   return result.value();
10644 }
10645 
10646 // ECMA#sec-samevalue
10647 // This algorithm differs from the Strict Equality Comparison Algorithm in its
10648 // treatment of signed zeroes and NaNs.
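// For example, SameValue(NaN, NaN) is true and SameValue(0, -0) is false,
// which is the opposite of what strict equality returns for these inputs.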
10649 void CodeStubAssembler::BranchIfSameValue(Node* lhs, Node* rhs, Label* if_true,
10650                                           Label* if_false) {
10651   VARIABLE(var_lhs_value, MachineRepresentation::kFloat64);
10652   VARIABLE(var_rhs_value, MachineRepresentation::kFloat64);
10653   Label do_fcmp(this);
10654 
10655   // Immediately jump to {if_true} if {lhs} == {rhs}, because - unlike
10656   // StrictEqual - SameValue considers two NaNs to be equal.
10657   GotoIf(WordEqual(lhs, rhs), if_true);
10658 
10659   // Check if the {lhs} is a Smi.
10660   Label if_lhsissmi(this), if_lhsisheapobject(this);
10661   Branch(TaggedIsSmi(lhs), &if_lhsissmi, &if_lhsisheapobject);
10662 
10663   BIND(&if_lhsissmi);
10664   {
10665     // Since {lhs} is a Smi, the comparison can only yield true
10666     // if the {rhs} is a HeapNumber with the same float64 value.
10667     GotoIf(TaggedIsSmi(rhs), if_false);
10668     GotoIfNot(IsHeapNumber(rhs), if_false);
10669     var_lhs_value.Bind(SmiToFloat64(lhs));
10670     var_rhs_value.Bind(LoadHeapNumberValue(rhs));
10671     Goto(&do_fcmp);
10672   }
10673 
10674   BIND(&if_lhsisheapobject);
10675   {
10676     // Check if the {rhs} is a Smi.
10677     Label if_rhsissmi(this), if_rhsisheapobject(this);
10678     Branch(TaggedIsSmi(rhs), &if_rhsissmi, &if_rhsisheapobject);
10679 
10680     BIND(&if_rhsissmi);
10681     {
10682       // Since {rhs} is a Smi, the comparison can only yield true
10683       // if the {lhs} is a HeapNumber with the same float64 value.
10684       GotoIfNot(IsHeapNumber(lhs), if_false);
10685       var_lhs_value.Bind(LoadHeapNumberValue(lhs));
10686       var_rhs_value.Bind(SmiToFloat64(rhs));
10687       Goto(&do_fcmp);
10688     }
10689 
10690     BIND(&if_rhsisheapobject);
10691     {
10692       // Now this can only yield true if either both {lhs} and {rhs} are
10693       // HeapNumbers with the same value, or both are Strings with the same
10694       // character sequence, or both are BigInts with the same value.
10695       Label if_lhsisheapnumber(this), if_lhsisstring(this),
10696           if_lhsisbigint(this);
10697       Node* const lhs_map = LoadMap(lhs);
10698       GotoIf(IsHeapNumberMap(lhs_map), &if_lhsisheapnumber);
10699       Node* const lhs_instance_type = LoadMapInstanceType(lhs_map);
10700       GotoIf(IsStringInstanceType(lhs_instance_type), &if_lhsisstring);
10701       Branch(IsBigIntInstanceType(lhs_instance_type), &if_lhsisbigint,
10702              if_false);
10703 
10704       BIND(&if_lhsisheapnumber);
10705       {
10706         GotoIfNot(IsHeapNumber(rhs), if_false);
10707         var_lhs_value.Bind(LoadHeapNumberValue(lhs));
10708         var_rhs_value.Bind(LoadHeapNumberValue(rhs));
10709         Goto(&do_fcmp);
10710       }
10711 
10712       BIND(&if_lhsisstring);
10713       {
10714         // Now we can only yield true if {rhs} is also a String
10715         // with the same sequence of characters.
10716         GotoIfNot(IsString(rhs), if_false);
10717         Node* const result =
10718             CallBuiltin(Builtins::kStringEqual, NoContextConstant(), lhs, rhs);
10719         Branch(IsTrue(result), if_true, if_false);
10720       }
10721 
10722       BIND(&if_lhsisbigint);
10723       {
10724         GotoIfNot(IsBigInt(rhs), if_false);
10725         Node* const result = CallRuntime(Runtime::kBigIntEqualToBigInt,
10726                                          NoContextConstant(), lhs, rhs);
10727         Branch(IsTrue(result), if_true, if_false);
10728       }
10729     }
10730   }
10731 
10732   BIND(&do_fcmp);
10733   {
10734     Node* const lhs_value = var_lhs_value.value();
10735     Node* const rhs_value = var_rhs_value.value();
10736 
10737     Label if_equal(this), if_notequal(this);
10738     Branch(Float64Equal(lhs_value, rhs_value), &if_equal, &if_notequal);
10739 
10740     BIND(&if_equal);
10741     {
10742       // We still need to handle the case when {lhs} and {rhs} are -0.0 and
10743       // 0.0 (or vice versa). Compare the high word to
10744       // distinguish between the two.
10745       Node* const lhs_hi_word = Float64ExtractHighWord32(lhs_value);
10746       Node* const rhs_hi_word = Float64ExtractHighWord32(rhs_value);
10747 
10748       // If x is +0 and y is -0, return false.
10749       // If x is -0 and y is +0, return false.
10750       Branch(Word32Equal(lhs_hi_word, rhs_hi_word), if_true, if_false);
10751     }
10752 
10753     BIND(&if_notequal);
10754     {
10755       // Return true iff both {rhs} and {lhs} are NaN.
10756       GotoIf(Float64Equal(lhs_value, lhs_value), if_false);
10757       Branch(Float64Equal(rhs_value, rhs_value), if_false, if_true);
10758     }
10759   }
10760 }
10761 
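// Walks the prototype chain of {object} looking for {key}. Fast own-property
// and element lookups are attempted on each holder; proxies are dispatched to
// the ProxyHasProperty builtin (or to the runtime in for-in mode), and any
// other bailout falls through to the HasProperty/ForInHasProperty runtime
// functions.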
10762 TNode<Oddball> CodeStubAssembler::HasProperty(SloppyTNode<HeapObject> object,
10763                                               SloppyTNode<Object> key,
10764                                               SloppyTNode<Context> context,
10765                                               HasPropertyLookupMode mode) {
10766   Label call_runtime(this, Label::kDeferred), return_true(this),
10767       return_false(this), end(this), if_proxy(this, Label::kDeferred);
10768 
10769   CodeStubAssembler::LookupInHolder lookup_property_in_holder =
10770       [this, &return_true](Node* receiver, Node* holder, Node* holder_map,
10771                            Node* holder_instance_type, Node* unique_name,
10772                            Label* next_holder, Label* if_bailout) {
10773         TryHasOwnProperty(holder, holder_map, holder_instance_type, unique_name,
10774                           &return_true, next_holder, if_bailout);
10775       };
10776 
10777   CodeStubAssembler::LookupInHolder lookup_element_in_holder =
10778       [this, &return_true, &return_false](
10779           Node* receiver, Node* holder, Node* holder_map,
10780           Node* holder_instance_type, Node* index, Label* next_holder,
10781           Label* if_bailout) {
10782         TryLookupElement(holder, holder_map, holder_instance_type, index,
10783                          &return_true, &return_false, next_holder, if_bailout);
10784       };
10785 
10786   TryPrototypeChainLookup(object, key, lookup_property_in_holder,
10787                           lookup_element_in_holder, &return_false,
10788                           &call_runtime, &if_proxy);
10789 
10790   TVARIABLE(Oddball, result);
10791 
10792   BIND(&if_proxy);
10793   {
10794     TNode<Name> name = ToName(context, key);
10795     switch (mode) {
10796       case kHasProperty:
10797         GotoIf(IsPrivateSymbol(name), &return_false);
10798 
10799         result = CAST(
10800             CallBuiltin(Builtins::kProxyHasProperty, context, object, name));
10801         Goto(&end);
10802         break;
10803       case kForInHasProperty:
10804         Goto(&call_runtime);
10805         break;
10806     }
10807   }
10808 
10809   BIND(&return_true);
10810   {
10811     result = TrueConstant();
10812     Goto(&end);
10813   }
10814 
10815   BIND(&return_false);
10816   {
10817     result = FalseConstant();
10818     Goto(&end);
10819   }
10820 
10821   BIND(&call_runtime);
10822   {
10823     Runtime::FunctionId fallback_runtime_function_id;
10824     switch (mode) {
10825       case kHasProperty:
10826         fallback_runtime_function_id = Runtime::kHasProperty;
10827         break;
10828       case kForInHasProperty:
10829         fallback_runtime_function_id = Runtime::kForInHasProperty;
10830         break;
10831     }
10832 
10833     result =
10834         CAST(CallRuntime(fallback_runtime_function_id, context, object, key));
10835     Goto(&end);
10836   }
10837 
10838   BIND(&end);
10839   CSA_ASSERT(this, IsBoolean(result.value()));
10840   return result.value();
10841 }
10842 
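// Implements the typeof operator, e.g. typeof 1 === "number", typeof 10n ===
// "bigint" and typeof null === "object". Undetectable receivers (such as
// document.all) report "undefined", while callable, non-undetectable
// receivers report "function".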
10843 Node* CodeStubAssembler::Typeof(Node* value) {
10844   VARIABLE(result_var, MachineRepresentation::kTagged);
10845 
10846   Label return_number(this, Label::kDeferred), if_oddball(this),
10847       return_function(this), return_undefined(this), return_object(this),
10848       return_string(this), return_bigint(this), return_result(this);
10849 
10850   GotoIf(TaggedIsSmi(value), &return_number);
10851 
10852   Node* map = LoadMap(value);
10853 
10854   GotoIf(IsHeapNumberMap(map), &return_number);
10855 
10856   Node* instance_type = LoadMapInstanceType(map);
10857 
10858   GotoIf(InstanceTypeEqual(instance_type, ODDBALL_TYPE), &if_oddball);
10859 
10860   Node* callable_or_undetectable_mask = Word32And(
10861       LoadMapBitField(map),
10862       Int32Constant(Map::IsCallableBit::kMask | Map::IsUndetectableBit::kMask));
10863 
10864   GotoIf(Word32Equal(callable_or_undetectable_mask,
10865                      Int32Constant(Map::IsCallableBit::kMask)),
10866          &return_function);
10867 
10868   GotoIfNot(Word32Equal(callable_or_undetectable_mask, Int32Constant(0)),
10869             &return_undefined);
10870 
10871   GotoIf(IsJSReceiverInstanceType(instance_type), &return_object);
10872 
10873   GotoIf(IsStringInstanceType(instance_type), &return_string);
10874 
10875   GotoIf(IsBigIntInstanceType(instance_type), &return_bigint);
10876 
10877   CSA_ASSERT(this, InstanceTypeEqual(instance_type, SYMBOL_TYPE));
10878   result_var.Bind(HeapConstant(isolate()->factory()->symbol_string()));
10879   Goto(&return_result);
10880 
10881   BIND(&return_number);
10882   {
10883     result_var.Bind(HeapConstant(isolate()->factory()->number_string()));
10884     Goto(&return_result);
10885   }
10886 
10887   BIND(&if_oddball);
10888   {
10889     Node* type = LoadObjectField(value, Oddball::kTypeOfOffset);
10890     result_var.Bind(type);
10891     Goto(&return_result);
10892   }
10893 
10894   BIND(&return_function);
10895   {
10896     result_var.Bind(HeapConstant(isolate()->factory()->function_string()));
10897     Goto(&return_result);
10898   }
10899 
10900   BIND(&return_undefined);
10901   {
10902     result_var.Bind(HeapConstant(isolate()->factory()->undefined_string()));
10903     Goto(&return_result);
10904   }
10905 
10906   BIND(&return_object);
10907   {
10908     result_var.Bind(HeapConstant(isolate()->factory()->object_string()));
10909     Goto(&return_result);
10910   }
10911 
10912   BIND(&return_string);
10913   {
10914     result_var.Bind(HeapConstant(isolate()->factory()->string_string()));
10915     Goto(&return_result);
10916   }
10917 
10918   BIND(&return_bigint);
10919   {
10920     result_var.Bind(HeapConstant(isolate()->factory()->bigint_string()));
10921     Goto(&return_result);
10922   }
10923 
10924   BIND(&return_result);
10925   return result_var.value();
10926 }
10927 
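// Loads the parent constructor for a super() call, i.e. the [[Prototype]] of
// the active function; if that prototype is not a constructor, it bails out
// to Runtime::kThrowNotSuperConstructor.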
10928 TNode<Object> CodeStubAssembler::GetSuperConstructor(
10929     SloppyTNode<Context> context, SloppyTNode<JSFunction> active_function) {
10930   Label is_not_constructor(this, Label::kDeferred), out(this);
10931   TVARIABLE(Object, result);
10932 
10933   TNode<Map> map = LoadMap(active_function);
10934   TNode<Object> prototype = LoadMapPrototype(map);
10935   TNode<Map> prototype_map = LoadMap(CAST(prototype));
10936   GotoIfNot(IsConstructorMap(prototype_map), &is_not_constructor);
10937 
10938   result = prototype;
10939   Goto(&out);
10940 
10941   BIND(&is_not_constructor);
10942   {
10943     CallRuntime(Runtime::kThrowNotSuperConstructor, context, prototype,
10944                 active_function);
10945     Unreachable();
10946   }
10947 
10948   BIND(&out);
10949   return result.value();
10950 }
10951 
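// ES #sec-speciesconstructor
// For example, Array.prototype.map consults @@species on the receiver's
// "constructor" property to decide which constructor to use for the result;
// if the "constructor" property is undefined, or its @@species is undefined
// or null, {default_constructor} is used instead.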
10952 TNode<Object> CodeStubAssembler::SpeciesConstructor(
10953     SloppyTNode<Context> context, SloppyTNode<Object> object,
10954     SloppyTNode<Object> default_constructor) {
10955   Isolate* isolate = this->isolate();
10956   TVARIABLE(Object, var_result, default_constructor);
10957 
10958   // 2. Let C be ? Get(O, "constructor").
10959   TNode<Object> constructor =
10960       GetProperty(context, object, isolate->factory()->constructor_string());
10961 
10962   // 3. If C is undefined, return defaultConstructor.
10963   Label out(this);
10964   GotoIf(IsUndefined(constructor), &out);
10965 
10966   // 4. If Type(C) is not Object, throw a TypeError exception.
10967   ThrowIfNotJSReceiver(context, constructor,
10968                        MessageTemplate::kConstructorNotReceiver);
10969 
10970   // 5. Let S be ? Get(C, @@species).
10971   TNode<Object> species =
10972       GetProperty(context, constructor, isolate->factory()->species_symbol());
10973 
10974   // 6. If S is either undefined or null, return defaultConstructor.
10975   GotoIf(IsNullOrUndefined(species), &out);
10976 
10977   // 7. If IsConstructor(S) is true, return S.
10978   Label throw_error(this);
10979   GotoIf(TaggedIsSmi(species), &throw_error);
10980   GotoIfNot(IsConstructorMap(LoadMap(CAST(species))), &throw_error);
10981   var_result = species;
10982   Goto(&out);
10983 
10984   // 8. Throw a TypeError exception.
10985   BIND(&throw_error);
10986   ThrowTypeError(context, MessageTemplate::kSpeciesNotConstructor);
10987 
10988   BIND(&out);
10989   return var_result.value();
10990 }
10991 
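// Implements the instanceof operator: if {callable} provides a @@hasInstance
// handler it is invoked (with a fast path for the default
// Function.prototype[@@hasInstance]), otherwise OrdinaryHasInstance is used;
// a non-receiver or non-callable right-hand side throws a TypeError.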
10992 Node* CodeStubAssembler::InstanceOf(Node* object, Node* callable,
10993                                     Node* context) {
10994   VARIABLE(var_result, MachineRepresentation::kTagged);
10995   Label if_notcallable(this, Label::kDeferred),
10996       if_notreceiver(this, Label::kDeferred), if_otherhandler(this),
10997       if_nohandler(this, Label::kDeferred), return_true(this),
10998       return_false(this), return_result(this, &var_result);
10999 
11000   // Ensure that the {callable} is actually a JSReceiver.
11001   GotoIf(TaggedIsSmi(callable), &if_notreceiver);
11002   GotoIfNot(IsJSReceiver(callable), &if_notreceiver);
11003 
11004   // Load the @@hasInstance property from {callable}.
11005   Node* inst_of_handler =
11006       GetProperty(context, callable, HasInstanceSymbolConstant());
11007 
11008   // Optimize for the likely case where {inst_of_handler} is the builtin
11009   // Function.prototype[@@hasInstance] method, and emit a direct call in
11010   // that case without any additional checking.
11011   Node* native_context = LoadNativeContext(context);
11012   Node* function_has_instance =
11013       LoadContextElement(native_context, Context::FUNCTION_HAS_INSTANCE_INDEX);
11014   GotoIfNot(WordEqual(inst_of_handler, function_has_instance),
11015             &if_otherhandler);
11016   {
11017     // TODO(6786): A direct call to a TFJ builtin breaks the lazy
11018     // deserialization mechanism in two ways: first, we always pass in a
11019     // callable containing the DeserializeLazy code object (assuming that
11020     // FunctionPrototypeHasInstance is lazy). Second, a direct call (without
11021     // going through CodeFactory::Call) to DeserializeLazy will not initialize
11022     // new_target properly. For now we can avoid this by marking
11023     // FunctionPrototypeHasInstance as eager, but this should be fixed at some
11024     // point.
11025     //
11026     // Call to Function.prototype[@@hasInstance] directly.
11027     Callable builtin(BUILTIN_CODE(isolate(), FunctionPrototypeHasInstance),
11028                      CallTrampolineDescriptor(isolate()));
11029     Node* result = CallJS(builtin, context, inst_of_handler, callable, object);
11030     var_result.Bind(result);
11031     Goto(&return_result);
11032   }
11033 
11034   BIND(&if_otherhandler);
11035   {
11036     // Check if there's actually an {inst_of_handler}.
11037     GotoIf(IsNull(inst_of_handler), &if_nohandler);
11038     GotoIf(IsUndefined(inst_of_handler), &if_nohandler);
11039 
11040     // Call the {inst_of_handler} for {callable} and {object}.
11041     Node* result = CallJS(
11042         CodeFactory::Call(isolate(), ConvertReceiverMode::kNotNullOrUndefined),
11043         context, inst_of_handler, callable, object);
11044 
11045     // Convert the {result} to a Boolean.
11046     BranchIfToBooleanIsTrue(result, &return_true, &return_false);
11047   }
11048 
11049   BIND(&if_nohandler);
11050   {
11051     // Ensure that the {callable} is actually Callable.
11052     GotoIfNot(IsCallable(callable), &if_notcallable);
11053 
11054     // Use the OrdinaryHasInstance algorithm.
11055     Node* result =
11056         CallBuiltin(Builtins::kOrdinaryHasInstance, context, callable, object);
11057     var_result.Bind(result);
11058     Goto(&return_result);
11059   }
11060 
11061   BIND(&if_notcallable);
11062   { ThrowTypeError(context, MessageTemplate::kNonCallableInInstanceOfCheck); }
11063 
11064   BIND(&if_notreceiver);
11065   { ThrowTypeError(context, MessageTemplate::kNonObjectInInstanceOfCheck); }
11066 
11067   BIND(&return_true);
11068   var_result.Bind(TrueConstant());
11069   Goto(&return_result);
11070 
11071   BIND(&return_false);
11072   var_result.Bind(FalseConstant());
11073   Goto(&return_result);
11074 
11075   BIND(&return_result);
11076   return var_result.value();
11077 }
11078 
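// Adds one to a Number, staying on the Smi fast path when possible and
// falling back to a float64 add (allocating a fresh HeapNumber) on Smi
// overflow or for HeapNumber inputs; NumberDec below mirrors this for
// decrementing.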
11079 TNode<Number> CodeStubAssembler::NumberInc(SloppyTNode<Number> value) {
11080   TVARIABLE(Number, var_result);
11081   TVARIABLE(Float64T, var_finc_value);
11082   Label if_issmi(this), if_isnotsmi(this), do_finc(this), end(this);
11083   Branch(TaggedIsSmi(value), &if_issmi, &if_isnotsmi);
11084 
11085   BIND(&if_issmi);
11086   {
11087     Label if_overflow(this);
11088     TNode<Smi> smi_value = CAST(value);
11089     TNode<Smi> one = SmiConstant(1);
11090     var_result = TrySmiAdd(smi_value, one, &if_overflow);
11091     Goto(&end);
11092 
11093     BIND(&if_overflow);
11094     {
11095       var_finc_value = SmiToFloat64(smi_value);
11096       Goto(&do_finc);
11097     }
11098   }
11099 
11100   BIND(&if_isnotsmi);
11101   {
11102     TNode<HeapNumber> heap_number_value = CAST(value);
11103 
11104     // Load the HeapNumber value.
11105     var_finc_value = LoadHeapNumberValue(heap_number_value);
11106     Goto(&do_finc);
11107   }
11108 
11109   BIND(&do_finc);
11110   {
11111     TNode<Float64T> finc_value = var_finc_value.value();
11112     TNode<Float64T> one = Float64Constant(1.0);
11113     TNode<Float64T> finc_result = Float64Add(finc_value, one);
11114     var_result = AllocateHeapNumberWithValue(finc_result);
11115     Goto(&end);
11116   }
11117 
11118   BIND(&end);
11119   return var_result.value();
11120 }
11121 
11122 TNode<Number> CodeStubAssembler::NumberDec(SloppyTNode<Number> value) {
11123   TVARIABLE(Number, var_result);
11124   TVARIABLE(Float64T, var_fdec_value);
11125   Label if_issmi(this), if_isnotsmi(this), do_fdec(this), end(this);
11126   Branch(TaggedIsSmi(value), &if_issmi, &if_isnotsmi);
11127 
11128   BIND(&if_issmi);
11129   {
11130     TNode<Smi> smi_value = CAST(value);
11131     TNode<Smi> one = SmiConstant(1);
11132     Label if_overflow(this);
11133     var_result = TrySmiSub(smi_value, one, &if_overflow);
11134     Goto(&end);
11135 
11136     BIND(&if_overflow);
11137     {
11138       var_fdec_value = SmiToFloat64(smi_value);
11139       Goto(&do_fdec);
11140     }
11141   }
11142 
11143   BIND(&if_isnotsmi);
11144   {
11145     TNode<HeapNumber> heap_number_value = CAST(value);
11146 
11147     // Load the HeapNumber value.
11148     var_fdec_value = LoadHeapNumberValue(heap_number_value);
11149     Goto(&do_fdec);
11150   }
11151 
11152   BIND(&do_fdec);
11153   {
11154     TNode<Float64T> fdec_value = var_fdec_value.value();
11155     TNode<Float64T> minus_one = Float64Constant(-1.0);
11156     TNode<Float64T> fdec_result = Float64Add(fdec_value, minus_one);
11157     var_result = AllocateHeapNumberWithValue(fdec_result);
11158     Goto(&end);
11159   }
11160 
11161   BIND(&end);
11162   return var_result.value();
11163 }
11164 
11165 TNode<Number> CodeStubAssembler::NumberAdd(SloppyTNode<Number> a,
11166                                            SloppyTNode<Number> b) {
11167   TVARIABLE(Number, var_result);
11168   Label float_add(this, Label::kDeferred), end(this);
11169   GotoIf(TaggedIsNotSmi(a), &float_add);
11170   GotoIf(TaggedIsNotSmi(b), &float_add);
11171 
11172   // Try fast Smi addition first.
11173   var_result = TrySmiAdd(CAST(a), CAST(b), &float_add);
11174   Goto(&end);
11175 
11176   BIND(&float_add);
11177   {
11178     var_result = ChangeFloat64ToTagged(
11179         Float64Add(ChangeNumberToFloat64(a), ChangeNumberToFloat64(b)));
11180     Goto(&end);
11181   }
11182 
11183   BIND(&end);
11184   return var_result.value();
11185 }
11186 
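// Subtracts {b} from {a}, mirroring NumberAdd: Smi fast path with a deferred
// Float64 fallback.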
11187 TNode<Number> CodeStubAssembler::NumberSub(SloppyTNode<Number> a,
11188                                            SloppyTNode<Number> b) {
11189   TVARIABLE(Number, var_result);
11190   Label float_sub(this, Label::kDeferred), end(this);
11191   GotoIf(TaggedIsNotSmi(a), &float_sub);
11192   GotoIf(TaggedIsNotSmi(b), &float_sub);
11193 
11194   // Try fast Smi subtraction first.
11195   var_result = TrySmiSub(CAST(a), CAST(b), &float_sub);
11196   Goto(&end);
11197 
11198   BIND(&float_sub);
11199   {
11200     var_result = ChangeFloat64ToTagged(
11201         Float64Sub(ChangeNumberToFloat64(a), ChangeNumberToFloat64(b)));
11202     Goto(&end);
11203   }
11204 
11205   BIND(&end);
11206   return var_result.value();
11207 }
11208 
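// Jumps to {is_not_number} unless {input} is a Smi or a HeapNumber.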
11209 void CodeStubAssembler::GotoIfNotNumber(Node* input, Label* is_not_number) {
11210   Label is_number(this);
11211   GotoIf(TaggedIsSmi(input), &is_number);
11212   Branch(IsHeapNumber(input), &is_number, is_not_number);
11213   BIND(&is_number);
11214 }
11215 
11216 void CodeStubAssembler::GotoIfNumber(Node* input, Label* is_number) {
11217   GotoIf(TaggedIsSmi(input), is_number);
11218   GotoIf(IsHeapNumber(input), is_number);
11219 }
11220 
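// Performs the requested 32-bit bitwise or shift operation and returns the
// result as a tagged Number. Shift counts are masked to five bits on
// platforms where Word32 shifts are not inherently safe.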
11221 TNode<Number> CodeStubAssembler::BitwiseOp(Node* left32, Node* right32,
11222                                            Operation bitwise_op) {
11223   switch (bitwise_op) {
11224     case Operation::kBitwiseAnd:
11225       return ChangeInt32ToTagged(Signed(Word32And(left32, right32)));
11226     case Operation::kBitwiseOr:
11227       return ChangeInt32ToTagged(Signed(Word32Or(left32, right32)));
11228     case Operation::kBitwiseXor:
11229       return ChangeInt32ToTagged(Signed(Word32Xor(left32, right32)));
11230     case Operation::kShiftLeft:
11231       if (!Word32ShiftIsSafe()) {
11232         right32 = Word32And(right32, Int32Constant(0x1F));
11233       }
11234       return ChangeInt32ToTagged(Signed(Word32Shl(left32, right32)));
11235     case Operation::kShiftRight:
11236       if (!Word32ShiftIsSafe()) {
11237         right32 = Word32And(right32, Int32Constant(0x1F));
11238       }
11239       return ChangeInt32ToTagged(Signed(Word32Sar(left32, right32)));
11240     case Operation::kShiftRightLogical:
11241       if (!Word32ShiftIsSafe()) {
11242         right32 = Word32And(right32, Int32Constant(0x1F));
11243       }
11244       return ChangeUint32ToTagged(Unsigned(Word32Shr(left32, right32)));
11245     default:
11246       break;
11247   }
11248   UNREACHABLE();
11249 }
11250 
11251 // ES #sec-createarrayiterator
11252 TNode<JSArrayIterator> CodeStubAssembler::CreateArrayIterator(
11253     TNode<Context> context, TNode<Object> object, IterationKind kind) {
11254   TNode<Context> native_context = LoadNativeContext(context);
11255   TNode<Map> iterator_map = CAST(LoadContextElement(
11256       native_context, Context::INITIAL_ARRAY_ITERATOR_MAP_INDEX));
11257   Node* iterator = Allocate(JSArrayIterator::kSize);
11258   StoreMapNoWriteBarrier(iterator, iterator_map);
11259   StoreObjectFieldRoot(iterator, JSArrayIterator::kPropertiesOrHashOffset,
11260                        Heap::kEmptyFixedArrayRootIndex);
11261   StoreObjectFieldRoot(iterator, JSArrayIterator::kElementsOffset,
11262                        Heap::kEmptyFixedArrayRootIndex);
11263   StoreObjectFieldNoWriteBarrier(
11264       iterator, JSArrayIterator::kIteratedObjectOffset, object);
11265   StoreObjectFieldNoWriteBarrier(iterator, JSArrayIterator::kNextIndexOffset,
11266                                  SmiConstant(0));
11267   StoreObjectFieldNoWriteBarrier(
11268       iterator, JSArrayIterator::kKindOffset,
11269       SmiConstant(Smi::FromInt(static_cast<int>(kind))));
11270   return CAST(iterator);
11271 }
11272 
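// Allocates a JSIteratorResult {value, done} object using the iterator result
// map from the native context.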
11273 Node* CodeStubAssembler::AllocateJSIteratorResult(Node* context, Node* value,
11274                                                   Node* done) {
11275   CSA_ASSERT(this, IsBoolean(done));
11276   Node* native_context = LoadNativeContext(context);
11277   Node* map =
11278       LoadContextElement(native_context, Context::ITERATOR_RESULT_MAP_INDEX);
11279   Node* result = Allocate(JSIteratorResult::kSize);
11280   StoreMapNoWriteBarrier(result, map);
11281   StoreObjectFieldRoot(result, JSIteratorResult::kPropertiesOrHashOffset,
11282                        Heap::kEmptyFixedArrayRootIndex);
11283   StoreObjectFieldRoot(result, JSIteratorResult::kElementsOffset,
11284                        Heap::kEmptyFixedArrayRootIndex);
11285   StoreObjectFieldNoWriteBarrier(result, JSIteratorResult::kValueOffset, value);
11286   StoreObjectFieldNoWriteBarrier(result, JSIteratorResult::kDoneOffset, done);
11287   return result;
11288 }
11289 
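// Allocates a JSIteratorResult whose value is a fresh packed JSArray
// [key, value] and whose done field is false. The FixedArray, the JSArray and
// the result object share a single allocation.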
11290 Node* CodeStubAssembler::AllocateJSIteratorResultForEntry(Node* context,
11291                                                           Node* key,
11292                                                           Node* value) {
11293   Node* native_context = LoadNativeContext(context);
11294   Node* length = SmiConstant(2);
11295   int const elements_size = FixedArray::SizeFor(2);
11296   Node* elements =
11297       Allocate(elements_size + JSArray::kSize + JSIteratorResult::kSize);
11298   StoreObjectFieldRoot(elements, FixedArray::kMapOffset,
11299                        Heap::kFixedArrayMapRootIndex);
11300   StoreObjectFieldNoWriteBarrier(elements, FixedArray::kLengthOffset, length);
11301   StoreFixedArrayElement(elements, 0, key);
11302   StoreFixedArrayElement(elements, 1, value);
11303   Node* array_map = LoadContextElement(
11304       native_context, Context::JS_ARRAY_PACKED_ELEMENTS_MAP_INDEX);
11305   Node* array = InnerAllocate(elements, elements_size);
11306   StoreMapNoWriteBarrier(array, array_map);
11307   StoreObjectFieldRoot(array, JSArray::kPropertiesOrHashOffset,
11308                        Heap::kEmptyFixedArrayRootIndex);
11309   StoreObjectFieldNoWriteBarrier(array, JSArray::kElementsOffset, elements);
11310   StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length);
11311   Node* iterator_map =
11312       LoadContextElement(native_context, Context::ITERATOR_RESULT_MAP_INDEX);
11313   Node* result = InnerAllocate(array, JSArray::kSize);
11314   StoreMapNoWriteBarrier(result, iterator_map);
11315   StoreObjectFieldRoot(result, JSIteratorResult::kPropertiesOrHashOffset,
11316                        Heap::kEmptyFixedArrayRootIndex);
11317   StoreObjectFieldRoot(result, JSIteratorResult::kElementsOffset,
11318                        Heap::kEmptyFixedArrayRootIndex);
11319   StoreObjectFieldNoWriteBarrier(result, JSIteratorResult::kValueOffset, array);
11320   StoreObjectFieldRoot(result, JSIteratorResult::kDoneOffset,
11321                        Heap::kFalseValueRootIndex);
11322   return result;
11323 }
11324 
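// Creates a new array of length {len} via the species constructor of {o}
// (ArraySpeciesCreate).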
11325 Node* CodeStubAssembler::ArraySpeciesCreate(TNode<Context> context,
11326                                             TNode<Object> o,
11327                                             TNode<Number> len) {
11328   Node* constructor =
11329       CallRuntime(Runtime::kArraySpeciesConstructor, context, o);
11330   return ConstructJS(CodeFactory::Construct(isolate()), context, constructor,
11331                      len);
11332 }
11333 
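// Returns whether the given JSArrayBuffer has been neutered (detached), by
// testing the WasNeutered flag of its bit field.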
11334 Node* CodeStubAssembler::IsDetachedBuffer(Node* buffer) {
11335   CSA_ASSERT(this, HasInstanceType(buffer, JS_ARRAY_BUFFER_TYPE));
11336 
11337   Node* buffer_bit_field = LoadObjectField(
11338       buffer, JSArrayBuffer::kBitFieldOffset, MachineType::Uint32());
11339   return IsSetWord32<JSArrayBuffer::WasNeutered>(buffer_bit_field);
11340 }
11341 
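// Computes the on-stack base address of the stub's arguments from the frame
// pointer (defaulting to the current frame) and the argument count {argc}.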
11342 CodeStubArguments::CodeStubArguments(
11343     CodeStubAssembler* assembler, Node* argc, Node* fp,
11344     CodeStubAssembler::ParameterMode param_mode, ReceiverMode receiver_mode)
11345     : assembler_(assembler),
11346       argc_mode_(param_mode),
11347       receiver_mode_(receiver_mode),
11348       argc_(argc),
11349       arguments_(),
11350       fp_(fp != nullptr ? fp : assembler_->LoadFramePointer()) {
11351   Node* offset = assembler_->ElementOffsetFromIndex(
11352       argc_, PACKED_ELEMENTS, param_mode,
11353       (StandardFrameConstants::kFixedSlotCountAboveFp - 1) * kPointerSize);
11354   arguments_ = assembler_->UncheckedCast<RawPtr<Object>>(
11355       assembler_->IntPtrAdd(fp_, offset));
11356 }
11357 
11358 TNode<Object> CodeStubArguments::GetReceiver() const {
11359   DCHECK_EQ(receiver_mode_, ReceiverMode::kHasReceiver);
11360   return assembler_->UncheckedCast<Object>(
11361       assembler_->Load(MachineType::AnyTagged(), arguments_,
11362                        assembler_->IntPtrConstant(kPointerSize)));
11363 }
11364 
11365 TNode<RawPtr<Object>> CodeStubArguments::AtIndexPtr(
11366     Node* index, CodeStubAssembler::ParameterMode mode) const {
11367   typedef compiler::Node Node;
11368   Node* negated_index = assembler_->IntPtrOrSmiSub(
11369       assembler_->IntPtrOrSmiConstant(0, mode), index, mode);
11370   Node* offset = assembler_->ElementOffsetFromIndex(negated_index,
11371                                                     PACKED_ELEMENTS, mode, 0);
11372   return assembler_->UncheckedCast<RawPtr<Object>>(assembler_->IntPtrAdd(
11373       assembler_->UncheckedCast<IntPtrT>(arguments_), offset));
11374 }
11375 
11376 TNode<Object> CodeStubArguments::AtIndex(
11377     Node* index, CodeStubAssembler::ParameterMode mode) const {
11378   DCHECK_EQ(argc_mode_, mode);
11379   CSA_ASSERT(assembler_,
11380              assembler_->UintPtrOrSmiLessThan(index, GetLength(mode), mode));
11381   return assembler_->UncheckedCast<Object>(
11382       assembler_->Load(MachineType::AnyTagged(), AtIndexPtr(index, mode)));
11383 }
11384 
11385 TNode<Object> CodeStubArguments::AtIndex(int index) const {
11386   return AtIndex(assembler_->IntPtrConstant(index));
11387 }
11388 
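// Returns the argument at {index}, or {default_value} if the stub was called
// with fewer than {index} + 1 arguments.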
11389 TNode<Object> CodeStubArguments::GetOptionalArgumentValue(
11390     int index, TNode<Object> default_value) {
11391   CodeStubAssembler::TVariable<Object> result(assembler_);
11392   CodeStubAssembler::Label argument_missing(assembler_),
11393       argument_done(assembler_, &result);
11394 
11395   assembler_->GotoIf(assembler_->UintPtrOrSmiGreaterThanOrEqual(
11396                          assembler_->IntPtrOrSmiConstant(index, argc_mode_),
11397                          argc_, argc_mode_),
11398                      &argument_missing);
11399   result = AtIndex(index);
11400   assembler_->Goto(&argument_done);
11401 
11402   assembler_->BIND(&argument_missing);
11403   result = default_value;
11404   assembler_->Goto(&argument_done);
11405 
11406   assembler_->BIND(&argument_done);
11407   return result.value();
11408 }
11409 
11410 TNode<Object> CodeStubArguments::GetOptionalArgumentValue(
11411     TNode<IntPtrT> index, TNode<Object> default_value) {
11412   CodeStubAssembler::TVariable<Object> result(assembler_);
11413   CodeStubAssembler::Label argument_missing(assembler_),
11414       argument_done(assembler_, &result);
11415 
11416   assembler_->GotoIf(
11417       assembler_->UintPtrOrSmiGreaterThanOrEqual(
11418           assembler_->IntPtrToParameter(index, argc_mode_), argc_, argc_mode_),
11419       &argument_missing);
11420   result = AtIndex(index);
11421   assembler_->Goto(&argument_done);
11422 
11423   assembler_->BIND(&argument_missing);
11424   result = default_value;
11425   assembler_->Goto(&argument_done);
11426 
11427   assembler_->BIND(&argument_done);
11428   return result.value();
11429 }
11430 
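// Invokes {body} for each argument in the half-open range [first, last),
// defaulting to all arguments when the bounds are not provided.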
11431 void CodeStubArguments::ForEach(
11432     const CodeStubAssembler::VariableList& vars,
11433     const CodeStubArguments::ForEachBodyFunction& body, Node* first, Node* last,
11434     CodeStubAssembler::ParameterMode mode) {
11435   assembler_->Comment("CodeStubArguments::ForEach");
11436   if (first == nullptr) {
11437     first = assembler_->IntPtrOrSmiConstant(0, mode);
11438   }
11439   if (last == nullptr) {
11440     DCHECK_EQ(mode, argc_mode_);
11441     last = argc_;
11442   }
11443   Node* start = assembler_->IntPtrSub(
11444       assembler_->UncheckedCast<IntPtrT>(arguments_),
11445       assembler_->ElementOffsetFromIndex(first, PACKED_ELEMENTS, mode));
11446   Node* end = assembler_->IntPtrSub(
11447       assembler_->UncheckedCast<IntPtrT>(arguments_),
11448       assembler_->ElementOffsetFromIndex(last, PACKED_ELEMENTS, mode));
11449   assembler_->BuildFastLoop(vars, start, end,
11450                             [this, &body](Node* current) {
11451                               Node* arg = assembler_->Load(
11452                                   MachineType::AnyTagged(), current);
11453                               body(arg);
11454                             },
11455                             -kPointerSize, CodeStubAssembler::INTPTR_PARAMETERS,
11456                             CodeStubAssembler::IndexAdvanceMode::kPost);
11457 }
11458 
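// Pops all arguments (plus the receiver, if there is one) off the stack and
// returns {value} from the stub.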
11459 void CodeStubArguments::PopAndReturn(Node* value) {
11460   Node* pop_count;
11461   if (receiver_mode_ == ReceiverMode::kHasReceiver) {
11462     pop_count = assembler_->IntPtrOrSmiAdd(
11463         argc_, assembler_->IntPtrOrSmiConstant(1, argc_mode_), argc_mode_);
11464   } else {
11465     pop_count = argc_;
11466   }
11467 
11468   assembler_->PopAndReturn(assembler_->ParameterToIntPtr(pop_count, argc_mode_),
11469                            value);
11470 }
11471 
11472 Node* CodeStubAssembler::IsFastElementsKind(Node* elements_kind) {
11473   STATIC_ASSERT(FIRST_ELEMENTS_KIND == FIRST_FAST_ELEMENTS_KIND);
11474   return Uint32LessThanOrEqual(elements_kind,
11475                                Int32Constant(LAST_FAST_ELEMENTS_KIND));
11476 }
11477 
11478 Node* CodeStubAssembler::IsFastSmiOrTaggedElementsKind(Node* elements_kind) {
11479   STATIC_ASSERT(FIRST_ELEMENTS_KIND == FIRST_FAST_ELEMENTS_KIND);
11480   STATIC_ASSERT(PACKED_DOUBLE_ELEMENTS > TERMINAL_FAST_ELEMENTS_KIND);
11481   STATIC_ASSERT(HOLEY_DOUBLE_ELEMENTS > TERMINAL_FAST_ELEMENTS_KIND);
11482   return Uint32LessThanOrEqual(elements_kind,
11483                                Int32Constant(TERMINAL_FAST_ELEMENTS_KIND));
11484 }
11485 
11486 Node* CodeStubAssembler::IsFastSmiElementsKind(Node* elements_kind) {
11487   return Uint32LessThanOrEqual(elements_kind,
11488                                Int32Constant(HOLEY_SMI_ELEMENTS));
11489 }
11490 
11491 Node* CodeStubAssembler::IsHoleyFastElementsKind(Node* elements_kind) {
11492   CSA_ASSERT(this, IsFastElementsKind(elements_kind));
11493 
11494   STATIC_ASSERT(HOLEY_SMI_ELEMENTS == (PACKED_SMI_ELEMENTS | 1));
11495   STATIC_ASSERT(HOLEY_ELEMENTS == (PACKED_ELEMENTS | 1));
11496   STATIC_ASSERT(HOLEY_DOUBLE_ELEMENTS == (PACKED_DOUBLE_ELEMENTS | 1));
11497   return IsSetWord32(elements_kind, 1);
11498 }
11499 
11500 Node* CodeStubAssembler::IsElementsKindGreaterThan(
11501     Node* target_kind, ElementsKind reference_kind) {
11502   return Int32GreaterThan(target_kind, Int32Constant(reference_kind));
11503 }
11504 
11505 Node* CodeStubAssembler::IsDebugActive() {
11506   Node* is_debug_active = Load(
11507       MachineType::Uint8(),
11508       ExternalConstant(ExternalReference::debug_is_active_address(isolate())));
11509   return Word32NotEqual(is_debug_active, Int32Constant(0));
11510 }
11511 
11512 Node* CodeStubAssembler::IsPromiseHookEnabledOrDebugIsActive() {
11513   Node* const promise_hook_or_debug_is_active =
11514       Load(MachineType::Uint8(),
11515            ExternalConstant(
11516                ExternalReference::promise_hook_or_debug_is_active_address(
11517                    isolate())));
11518   return Word32NotEqual(promise_hook_or_debug_is_active, Int32Constant(0));
11519 }
11520 
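// Loads the Code object for the builtin with the given {builtin_id} from the
// isolate's builtins table.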
11521 TNode<Code> CodeStubAssembler::LoadBuiltin(TNode<Smi> builtin_id) {
11522   CSA_ASSERT(this, SmiGreaterThanOrEqual(builtin_id, SmiConstant(0)));
11523   CSA_ASSERT(this,
11524              SmiLessThan(builtin_id, SmiConstant(Builtins::builtin_count)));
11525 
11526   int const kSmiShiftBits = kSmiShiftSize + kSmiTagSize;
11527   int index_shift = kPointerSizeLog2 - kSmiShiftBits;
11528   TNode<WordT> table_index =
11529       index_shift >= 0 ? WordShl(BitcastTaggedToWord(builtin_id), index_shift)
11530                        : WordSar(BitcastTaggedToWord(builtin_id), -index_shift);
11531 
11532   return CAST(
11533       Load(MachineType::TaggedPointer(),
11534            ExternalConstant(ExternalReference::builtins_address(isolate())),
11535            table_index));
11536 }
11537 
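// Determines the Code object a function with this SharedFunctionInfo should
// run, by dispatching on the kind of its function-data field (builtin id,
// bytecode, wasm export wrapper, asm.js data, preparsed scope data, function
// template info, or interpreter data).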
11538 TNode<Code> CodeStubAssembler::GetSharedFunctionInfoCode(
11539     SloppyTNode<SharedFunctionInfo> shared_info) {
11540   TNode<Object> sfi_data =
11541       LoadObjectField(shared_info, SharedFunctionInfo::kFunctionDataOffset);
11542 
11543   TYPED_VARIABLE_DEF(Code, sfi_code, this);
11544 
11545   Label done(this);
11546   Label check_instance_type(this);
11547 
11548   // IsSmi: Is builtin
11549   GotoIf(TaggedIsNotSmi(sfi_data), &check_instance_type);
11550   sfi_code = LoadBuiltin(CAST(sfi_data));
11551   Goto(&done);
11552 
11553   // Switch on data's instance type.
11554   BIND(&check_instance_type);
11555   TNode<Int32T> data_type = LoadInstanceType(CAST(sfi_data));
11556 
11557   int32_t case_values[] = {BYTECODE_ARRAY_TYPE,
11558                            WASM_EXPORTED_FUNCTION_DATA_TYPE, FIXED_ARRAY_TYPE,
11559                            TUPLE2_TYPE, FUNCTION_TEMPLATE_INFO_TYPE};
11560   Label check_is_bytecode_array(this);
11561   Label check_is_exported_function_data(this);
11562   Label check_is_fixed_array(this);
11563   Label check_is_pre_parsed_scope_data(this);
11564   Label check_is_function_template_info(this);
11565   Label check_is_interpreter_data(this);
11566   Label* case_labels[] = {
11567       &check_is_bytecode_array, &check_is_exported_function_data,
11568       &check_is_fixed_array, &check_is_pre_parsed_scope_data,
11569       &check_is_function_template_info};
11570   STATIC_ASSERT(arraysize(case_values) == arraysize(case_labels));
11571   Switch(data_type, &check_is_interpreter_data, case_values, case_labels,
11572          arraysize(case_labels));
11573 
11574   // IsBytecodeArray: Interpret bytecode
11575   BIND(&check_is_bytecode_array);
11576   DCHECK(!Builtins::IsLazy(Builtins::kInterpreterEntryTrampoline));
11577   sfi_code = HeapConstant(BUILTIN_CODE(isolate(), InterpreterEntryTrampoline));
11578   Goto(&done);
11579 
11580   // IsWasmExportedFunctionData: Use the wrapper code
11581   BIND(&check_is_exported_function_data);
11582   sfi_code = CAST(LoadObjectField(
11583       CAST(sfi_data), WasmExportedFunctionData::kWrapperCodeOffset));
11584   Goto(&done);
11585 
11586   // IsFixedArray: Instantiate using AsmWasmData
11587   BIND(&check_is_fixed_array);
11588   DCHECK(!Builtins::IsLazy(Builtins::kInstantiateAsmJs));
11589   sfi_code = HeapConstant(BUILTIN_CODE(isolate(), InstantiateAsmJs));
11590   Goto(&done);
11591 
11592   // IsPreParsedScopeData: Compile lazy
11593   BIND(&check_is_pre_parsed_scope_data);
11594   DCHECK(!Builtins::IsLazy(Builtins::kCompileLazy));
11595   sfi_code = HeapConstant(BUILTIN_CODE(isolate(), CompileLazy));
11596   Goto(&done);
11597 
11598   // IsFunctionTemplateInfo: API call
11599   BIND(&check_is_function_template_info);
11600   DCHECK(!Builtins::IsLazy(Builtins::kHandleApiCall));
11601   sfi_code = HeapConstant(BUILTIN_CODE(isolate(), HandleApiCall));
11602   Goto(&done);
11603 
11604   // IsInterpreterData: Interpret bytecode
11605   BIND(&check_is_interpreter_data);
11606   // This is the default branch, so assert that we have the expected data type.
11607   CSA_ASSERT(this,
11608              Word32Equal(data_type, Int32Constant(INTERPRETER_DATA_TYPE)));
11609   sfi_code = CAST(LoadObjectField(
11610       CAST(sfi_data), InterpreterData::kInterpreterTrampolineOffset));
11611   Goto(&done);
11612 
11613   BIND(&done);
11614   return sfi_code.value();
11615 }
11616 
11617 Node* CodeStubAssembler::AllocateFunctionWithMapAndContext(Node* map,
11618                                                            Node* shared_info,
11619                                                            Node* context) {
11620   CSA_SLOW_ASSERT(this, IsMap(map));
11621 
11622   Node* const code = GetSharedFunctionInfoCode(shared_info);
11623 
11624   // TODO(ishell): All the callers of this function pass map loaded from
11625   // Context::STRICT_FUNCTION_WITHOUT_PROTOTYPE_MAP_INDEX. So we can remove
11626   // map parameter.
11627   CSA_ASSERT(this, Word32BinaryNot(IsConstructorMap(map)));
11628   CSA_ASSERT(this, Word32BinaryNot(IsFunctionWithPrototypeSlotMap(map)));
11629   Node* const fun = Allocate(JSFunction::kSizeWithoutPrototype);
11630   STATIC_ASSERT(JSFunction::kSizeWithoutPrototype == 7 * kPointerSize);
11631   StoreMapNoWriteBarrier(fun, map);
11632   StoreObjectFieldRoot(fun, JSObject::kPropertiesOrHashOffset,
11633                        Heap::kEmptyFixedArrayRootIndex);
11634   StoreObjectFieldRoot(fun, JSObject::kElementsOffset,
11635                        Heap::kEmptyFixedArrayRootIndex);
11636   StoreObjectFieldRoot(fun, JSFunction::kFeedbackCellOffset,
11637                        Heap::kManyClosuresCellRootIndex);
11638   StoreObjectFieldNoWriteBarrier(fun, JSFunction::kSharedFunctionInfoOffset,
11639                                  shared_info);
11640   StoreObjectFieldNoWriteBarrier(fun, JSFunction::kContextOffset, context);
11641   StoreObjectFieldNoWriteBarrier(fun, JSFunction::kCodeOffset, code);
11642   return fun;
11643 }
11644 
11645 Node* CodeStubAssembler::MarkerIsFrameType(Node* marker_or_function,
11646                                            StackFrame::Type frame_type) {
11647   return WordEqual(marker_or_function,
11648                    IntPtrConstant(StackFrame::TypeToMarker(frame_type)));
11649 }
11650 
11651 Node* CodeStubAssembler::MarkerIsNotFrameType(Node* marker_or_function,
11652                                               StackFrame::Type frame_type) {
11653   return WordNotEqual(marker_or_function,
11654                       IntPtrConstant(StackFrame::TypeToMarker(frame_type)));
11655 }
11656 
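// Walks the prototype chain of {receiver} and jumps to {if_fast} once the end
// of the chain is reached, provided no object on the chain has elements and
// every prototype has an empty enum cache; otherwise jumps to {if_slow}.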
11657 void CodeStubAssembler::CheckPrototypeEnumCache(Node* receiver,
11658                                                 Node* receiver_map,
11659                                                 Label* if_fast,
11660                                                 Label* if_slow) {
11661   VARIABLE(var_object, MachineRepresentation::kTagged, receiver);
11662   VARIABLE(var_object_map, MachineRepresentation::kTagged, receiver_map);
11663 
11664   Label loop(this, {&var_object, &var_object_map}), done_loop(this);
11665   Goto(&loop);
11666   BIND(&loop);
11667   {
11668     // Check that there are no elements on the current {object}.
11669     Label if_no_elements(this);
11670     Node* object = var_object.value();
11671     Node* object_map = var_object_map.value();
11672 
11673     // The following relies on the elements only aliasing with JSProxy::target,
11674     // which is a JavaScript value and hence cannot be confused with an elements
11675     // backing store.
11676     STATIC_ASSERT(JSObject::kElementsOffset == JSProxy::kTargetOffset);
11677     Node* object_elements = LoadObjectField(object, JSObject::kElementsOffset);
11678     GotoIf(IsEmptyFixedArray(object_elements), &if_no_elements);
11679     GotoIf(IsEmptySlowElementDictionary(object_elements), &if_no_elements);
11680 
11681     // It might still be an empty JSArray.
11682     GotoIfNot(IsJSArrayMap(object_map), if_slow);
11683     Node* object_length = LoadJSArrayLength(object);
11684     Branch(WordEqual(object_length, SmiConstant(0)), &if_no_elements, if_slow);
11685 
11686     // Continue with the {object}'s prototype.
11687     BIND(&if_no_elements);
11688     object = LoadMapPrototype(object_map);
11689     GotoIf(IsNull(object), if_fast);
11690 
11691     // For all {object}s but the {receiver}, check that the cache is empty.
11692     var_object.Bind(object);
11693     object_map = LoadMap(object);
11694     var_object_map.Bind(object_map);
11695     Node* object_enum_length = LoadMapEnumLength(object_map);
11696     Branch(WordEqual(object_enum_length, IntPtrConstant(0)), &loop, if_slow);
11697   }
11698 }
11699 
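// Checks whether a for-in enumeration of {receiver} can be done quickly:
// jumps to {if_empty} for element-free dictionary-mode receivers without
// properties, to {if_runtime} when the runtime must be consulted, and
// otherwise falls through and returns the receiver map, which carries a
// usable enum cache.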
11700 Node* CodeStubAssembler::CheckEnumCache(Node* receiver, Label* if_empty,
11701                                         Label* if_runtime) {
11702   Label if_fast(this), if_cache(this), if_no_cache(this, Label::kDeferred);
11703   Node* receiver_map = LoadMap(receiver);
11704 
11705   // Check if the enum length field of the {receiver} is properly initialized,
11706   // indicating that there is an enum cache.
11707   Node* receiver_enum_length = LoadMapEnumLength(receiver_map);
11708   Branch(WordEqual(receiver_enum_length,
11709                    IntPtrConstant(kInvalidEnumCacheSentinel)),
11710          &if_no_cache, &if_cache);
11711 
11712   BIND(&if_no_cache);
11713   {
11714     // Avoid runtime-call for empty dictionary receivers.
11715     GotoIfNot(IsDictionaryMap(receiver_map), if_runtime);
11716     Node* properties = LoadSlowProperties(receiver);
11717     Node* length = LoadFixedArrayElement(
11718         properties, NameDictionary::kNumberOfElementsIndex);
11719     GotoIfNot(WordEqual(length, SmiConstant(0)), if_runtime);
11720     // Check that there are no elements on the {receiver} and its prototype
11721     // chain. Given that we do not create an EnumCache for dict-mode objects,
11722     // directly jump to {if_empty} if there are no elements and no properties
11723     // on the {receiver}.
11724     CheckPrototypeEnumCache(receiver, receiver_map, if_empty, if_runtime);
11725   }
11726 
11727   // Check that there are no elements on the fast {receiver} and its
11728   // prototype chain.
11729   BIND(&if_cache);
11730   CheckPrototypeEnumCache(receiver, receiver_map, &if_fast, if_runtime);
11731 
11732   BIND(&if_fast);
11733   return receiver_map;
11734 }
11735 
11736 TNode<IntPtrT> CodeStubAssembler::GetArgumentsLength(CodeStubArguments* args) {
11737   return args->GetLength();
11738 }
11739 
11740 TNode<Object> CodeStubAssembler::GetArgumentValue(CodeStubArguments* args,
11741                                                   TNode<IntPtrT> index) {
11742   return args->GetOptionalArgumentValue(index);
11743 }
11744 
11745 TNode<Object> CodeStubAssembler::GetArgumentValueSmiIndex(
11746     CodeStubArguments* args, TNode<Smi> index) {
11747   return args->GetOptionalArgumentValue(SmiUntag(index));
11748 }
11749 
11750 void CodeStubAssembler::Print(const char* s) {
11751   std::string formatted(s);
11752   formatted += "\n";
11753   CallRuntime(Runtime::kGlobalPrint, NoContextConstant(),
11754               StringConstant(formatted.c_str()));
11755 }
11756 
11757 void CodeStubAssembler::Print(const char* prefix, Node* tagged_value) {
11758   if (prefix != nullptr) {
11759     std::string formatted(prefix);
11760     formatted += ": ";
11761     Handle<String> string = isolate()->factory()->NewStringFromAsciiChecked(
11762         formatted.c_str(), TENURED);
11763     CallRuntime(Runtime::kGlobalPrint, NoContextConstant(),
11764                 HeapConstant(string));
11765   }
11766   CallRuntime(Runtime::kDebugPrint, NoContextConstant(), tagged_value);
11767 }
11768 
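// Compares the stack pointer against the stack limit and calls
// Runtime::kStackGuard when an interrupt is pending.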
11769 void CodeStubAssembler::PerformStackCheck(Node* context) {
11770   Label ok(this), stack_check_interrupt(this, Label::kDeferred);
11771 
11772   Node* sp = LoadStackPointer();
11773   Node* stack_limit = Load(
11774       MachineType::Pointer(),
11775       ExternalConstant(ExternalReference::address_of_stack_limit(isolate())));
11776   Node* interrupt = UintPtrLessThan(sp, stack_limit);
11777 
11778   Branch(interrupt, &stack_check_interrupt, &ok);
11779 
11780   BIND(&stack_check_interrupt);
11781   {
11782     CallRuntime(Runtime::kStackGuard, context);
11783     Goto(&ok);
11784   }
11785 
11786   BIND(&ok);
11787 }
11788 
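// Initializes the fixed header slots of a freshly allocated function context:
// map, length, scope info, previous context, extension and native context.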
11789 void CodeStubAssembler::InitializeFunctionContext(Node* native_context,
11790                                                   Node* context, int slots) {
11791   DCHECK_GE(slots, Context::MIN_CONTEXT_SLOTS);
11792   StoreMapNoWriteBarrier(context, Heap::kFunctionContextMapRootIndex);
11793   StoreObjectFieldNoWriteBarrier(context, FixedArray::kLengthOffset,
11794                                  SmiConstant(slots));
11795 
11796   Node* const empty_scope_info =
11797       LoadContextElement(native_context, Context::SCOPE_INFO_INDEX);
11798   StoreContextElementNoWriteBarrier(context, Context::SCOPE_INFO_INDEX,
11799                                     empty_scope_info);
11800   StoreContextElementNoWriteBarrier(context, Context::PREVIOUS_INDEX,
11801                                     UndefinedConstant());
11802   StoreContextElementNoWriteBarrier(context, Context::EXTENSION_INDEX,
11803                                     TheHoleConstant());
11804   StoreContextElementNoWriteBarrier(context, Context::NATIVE_CONTEXT_INDEX,
11805                                     native_context);
11806 }
11807 
11808 }  // namespace internal
11809 }  // namespace v8
11810