1 // Copyright 2017 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "src/api/api.h"
6 #include "src/builtins/builtins-utils-gen.h"
7 #include "src/builtins/builtins.h"
8 #include "src/codegen/code-stub-assembler.h"
9 #include "src/codegen/macro-assembler.h"
10 #include "src/heap/heap-inl.h" // crbug.com/v8/8499
11 #include "src/ic/accessor-assembler.h"
12 #include "src/ic/keyed-store-generic.h"
13 #include "src/logging/counters.h"
14 #include "src/objects/debug-objects.h"
15 #include "src/objects/shared-function-info.h"
16 #include "src/runtime/runtime.h"
17
18 namespace v8 {
19 namespace internal {
20
21 // -----------------------------------------------------------------------------
22 // Stack checks.
23
// StackCheck builtin: a bare tail call into the StackGuard runtime function,
// which handles stack-overflow and interrupt checks for generated code.
void Builtins::Generate_StackCheck(MacroAssembler* masm) {
  masm->TailCallRuntime(Runtime::kStackGuard);
}
27
28 // -----------------------------------------------------------------------------
29 // TurboFan support builtins.
30
// Replaces a JSObject's elements backing store with a fresh copy of itself
// (FixedArray flavor only) and returns the new backing store.
TF_BUILTIN(CopyFastSmiOrObjectElements, CodeStubAssembler) {
  TNode<JSObject> receiver = CAST(Parameter(Descriptor::kObject));

  // Fetch the current backing store, clone it, and install the clone.
  TNode<FixedArrayBase> old_elements =
      CAST(LoadObjectField(receiver, JSObject::kElementsOffset));
  TNode<FixedArrayBase> new_elements =
      CloneFixedArray(old_elements, ExtractFixedArrayFlag::kFixedArrays);
  StoreObjectField(receiver, JSObject::kElementsOffset, new_elements);
  Return(new_elements);
}
42
// Attempts an in-place growth of a JSObject's PACKED_DOUBLE_ELEMENTS backing
// store so that {key} fits; falls back to the GrowArrayElements runtime
// function when inline growing bails out.
TF_BUILTIN(GrowFastDoubleElements, CodeStubAssembler) {
  TNode<JSObject> receiver = CAST(Parameter(Descriptor::kObject));
  TNode<Smi> index = CAST(Parameter(Descriptor::kKey));

  Label call_runtime(this, Label::kDeferred);
  TNode<FixedArrayBase> backing_store = LoadElements(receiver);
  backing_store = TryGrowElementsCapacity(
      receiver, backing_store, PACKED_DOUBLE_ELEMENTS, index, &call_runtime);
  Return(backing_store);

  BIND(&call_runtime);
  TailCallRuntime(Runtime::kGrowArrayElements, NoContextConstant(), receiver,
                  index);
}
57
// Attempts an in-place growth of a JSObject's PACKED_ELEMENTS backing store
// so that {key} fits; falls back to the GrowArrayElements runtime function
// when inline growing bails out.
TF_BUILTIN(GrowFastSmiOrObjectElements, CodeStubAssembler) {
  TNode<JSObject> receiver = CAST(Parameter(Descriptor::kObject));
  TNode<Smi> index = CAST(Parameter(Descriptor::kKey));

  Label call_runtime(this, Label::kDeferred);
  TNode<FixedArrayBase> backing_store = LoadElements(receiver);
  backing_store = TryGrowElementsCapacity(receiver, backing_store,
                                          PACKED_ELEMENTS, index,
                                          &call_runtime);
  Return(backing_store);

  BIND(&call_runtime);
  TailCallRuntime(Runtime::kGrowArrayElements, NoContextConstant(), receiver,
                  index);
}
72
// Allocates and fills the elements backing store for an arguments object.
// {frame} is an untagged frame pointer to copy parameters from, {length} is
// the number of elements to allocate, and {mapped_count} is the number of
// leading slots to hole-initialize (used for mapped/aliased arguments).
// Falls back to the NewArgumentsElements runtime function when the array is
// too large for a new-space allocation.
TF_BUILTIN(NewArgumentsElements, CodeStubAssembler) {
  TNode<IntPtrT> frame = UncheckedCast<IntPtrT>(Parameter(Descriptor::kFrame));
  TNode<IntPtrT> length = SmiToIntPtr(Parameter(Descriptor::kLength));
  TNode<IntPtrT> mapped_count =
      SmiToIntPtr(Parameter(Descriptor::kMappedCount));

  // Check if we can allocate in new space.
  ElementsKind kind = PACKED_ELEMENTS;
  int max_elements = FixedArray::GetMaxLengthForNewSpaceAllocation(kind);
  Label if_newspace(this), if_oldspace(this, Label::kDeferred);
  Branch(IntPtrLessThan(length, IntPtrConstant(max_elements)), &if_newspace,
         &if_oldspace);

  BIND(&if_newspace);
  {
    // Prefer EmptyFixedArray in case of non-positive {length} (the {length}
    // can be negative here for rest parameters).
    Label if_empty(this), if_notempty(this);
    Branch(IntPtrLessThanOrEqual(length, IntPtrConstant(0)), &if_empty,
           &if_notempty);

    BIND(&if_empty);
    Return(EmptyFixedArrayConstant());

    BIND(&if_notempty);
    {
      // Allocate a FixedArray in new space.
      TNode<FixedArray> result = CAST(AllocateFixedArray(kind, length));

      // The elements might be used to back mapped arguments. In that case fill
      // the mapped elements (i.e. the first {mapped_count}) with the hole, but
      // make sure not to overshoot the {length} if some arguments are missing.
      TNode<IntPtrT> number_of_holes = IntPtrMin(mapped_count, length);
      TNode<Oddball> the_hole = TheHoleConstant();

      // Fill the first elements up to {number_of_holes} with the hole.
      TVARIABLE(IntPtrT, var_index, IntPtrConstant(0));
      Label loop1(this, &var_index), done_loop1(this);
      Goto(&loop1);
      BIND(&loop1);
      {
        // Load the current {index}.
        TNode<IntPtrT> index = var_index.value();

        // Check if we are done.
        GotoIf(IntPtrEqual(index, number_of_holes), &done_loop1);

        // Store the hole into the {result}.
        StoreFixedArrayElement(result, index, the_hole, SKIP_WRITE_BARRIER);

        // Continue with next {index}.
        var_index = IntPtrAdd(index, IntPtrConstant(1));
        Goto(&loop1);
      }
      BIND(&done_loop1);

      // Compute the effective {offset} into the {frame}: parameter {index} is
      // loaded below from frame + (length + 1 - index) * kSystemPointerSize.
      TNode<IntPtrT> offset = IntPtrAdd(length, IntPtrConstant(1));

      // Copy the parameters from {frame} (starting at {offset}) to {result}.
      // Note that {var_index} carries over from the loop above, so copying
      // starts at {number_of_holes} and the mapped slots keep the hole.
      Label loop2(this, &var_index), done_loop2(this);
      Goto(&loop2);
      BIND(&loop2);
      {
        // Load the current {index}.
        TNode<IntPtrT> index = var_index.value();

        // Check if we are done.
        GotoIf(IntPtrEqual(index, length), &done_loop2);

        // Load the parameter at the given {index}.
        TNode<Object> value = BitcastWordToTagged(
            Load(MachineType::Pointer(), frame,
                 TimesSystemPointerSize(IntPtrSub(offset, index))));

        // Store the {value} into the {result}.
        StoreFixedArrayElement(result, index, value, SKIP_WRITE_BARRIER);

        // Continue with next {index}.
        var_index = IntPtrAdd(index, IntPtrConstant(1));
        Goto(&loop2);
      }
      BIND(&done_loop2);

      Return(result);
    }
  }

  BIND(&if_oldspace);
  {
    // Allocate in old space (or large object space).
    TailCallRuntime(Runtime::kNewArgumentsElements, NoContextConstant(),
                    BitcastWordToTagged(frame), SmiFromIntPtr(length),
                    SmiFromIntPtr(mapped_count));
  }
}
169
// Returns the receiver argument unchanged.
TF_BUILTIN(ReturnReceiver, CodeStubAssembler) {
  TNode<Object> the_receiver = CAST(Parameter(Descriptor::kReceiver));
  Return(the_receiver);
}
174
// Checks whether the called function's DebugInfo requests a break at entry;
// if so, calls Runtime::kDebugBreakAtEntry first. In all cases it finally
// tail-calls the code object obtained from the function's
// SharedFunctionInfo, forwarding receiver/new.target/argc unchanged.
TF_BUILTIN(DebugBreakTrampoline, CodeStubAssembler) {
  Label tailcall_to_shared(this);
  TNode<Context> context = CAST(Parameter(Descriptor::kContext));
  TNode<Object> new_target = CAST(Parameter(Descriptor::kJSNewTarget));
  TNode<Int32T> arg_count =
      UncheckedCast<Int32T>(Parameter(Descriptor::kJSActualArgumentsCount));
  TNode<JSFunction> function = CAST(Parameter(Descriptor::kJSTarget));

  // Check break-at-entry flag on the debug info.
  TNode<SharedFunctionInfo> shared =
      CAST(LoadObjectField(function, JSFunction::kSharedFunctionInfoOffset));
  // The ScriptOrDebugInfo slot may hold something other than a DebugInfo
  // (e.g. a Smi or a script); bail to the plain tail call in those cases.
  TNode<Object> maybe_heap_object_or_smi =
      LoadObjectField(shared, SharedFunctionInfo::kScriptOrDebugInfoOffset);
  TNode<HeapObject> maybe_debug_info =
      TaggedToHeapObject(maybe_heap_object_or_smi, &tailcall_to_shared);
  GotoIfNot(HasInstanceType(maybe_debug_info, InstanceType::DEBUG_INFO_TYPE),
            &tailcall_to_shared);

  {
    TNode<DebugInfo> debug_info = CAST(maybe_debug_info);
    TNode<Smi> flags =
        CAST(LoadObjectField(debug_info, DebugInfo::kFlagsOffset));
    // Only break if the kBreakAtEntry flag is set.
    GotoIfNot(SmiToInt32(SmiAnd(flags, SmiConstant(DebugInfo::kBreakAtEntry))),
              &tailcall_to_shared);

    CallRuntime(Runtime::kDebugBreakAtEntry, context, function);
    Goto(&tailcall_to_shared);
  }

  BIND(&tailcall_to_shared);
  // Tail call into code object on the SharedFunctionInfo.
  TNode<Code> code = GetSharedFunctionInfoCode(shared);
  TailCallJSCode(code, context, function, new_target, arg_count);
}
209
// Assembler with helpers shared by the write-barrier stubs below
// (RecordWrite and EphemeronKeyBarrier): querying heap/page state (marking
// flag, page flags, mark bits) and recording slots in the old-to-new
// remembered set, either inline or through C functions.
class RecordWriteCodeStubAssembler : public CodeStubAssembler {
 public:
  explicit RecordWriteCodeStubAssembler(compiler::CodeAssemblerState* state)
      : CodeStubAssembler(state) {}

  // Returns true iff the heap's is-marking flag is non-zero (i.e.
  // incremental marking is currently active).
  TNode<BoolT> IsMarking() {
    TNode<ExternalReference> is_marking_addr = ExternalConstant(
        ExternalReference::heap_is_marking_flag_address(this->isolate()));
    return Word32NotEqual(Load<Uint8T>(is_marking_addr), Int32Constant(0));
  }

  // Tests whether any bit of {mask} is set in the flags word of the
  // MemoryChunk (page) containing the address {object}.
  TNode<BoolT> IsPageFlagSet(TNode<IntPtrT> object, int mask) {
    TNode<IntPtrT> page = PageFromAddress(object);
    TNode<IntPtrT> flags =
        UncheckedCast<IntPtrT>(Load(MachineType::Pointer(), page,
                                    IntPtrConstant(MemoryChunk::kFlagsOffset)));
    return WordNotEqual(WordAnd(flags, IntPtrConstant(mask)),
                        IntPtrConstant(0));
  }

  // Returns true iff {object}'s mark bit is unset ("white" in the marking
  // bitmap, bit pattern "00" per the DCHECK below).
  TNode<BoolT> IsWhite(TNode<IntPtrT> object) {
    DCHECK_EQ(strcmp(Marking::kWhiteBitPattern, "00"), 0);
    TNode<IntPtrT> cell;
    TNode<IntPtrT> mask;
    GetMarkBit(object, &cell, &mask);
    TNode<Int32T> mask32 = TruncateIntPtrToInt32(mask);
    // Non-white has 1 for the first bit, so we only need to check for the first
    // bit.
    return Word32Equal(Word32And(Load<Int32T>(cell), mask32), Int32Constant(0));
  }

  // Computes, for the address {object}, the address of its mark-bitmap cell
  // (*cell) and the single-bit mask within that cell (*mask).
  void GetMarkBit(TNode<IntPtrT> object, TNode<IntPtrT>* cell,
                  TNode<IntPtrT>* mask) {
    TNode<IntPtrT> page = PageFromAddress(object);
    TNode<IntPtrT> bitmap =
        Load<IntPtrT>(page, IntPtrConstant(MemoryChunk::kMarkBitmapOffset));

    {
      // Temp variable to calculate cell offset in bitmap.
      TNode<WordT> r0;
      int shift = Bitmap::kBitsPerCellLog2 + kTaggedSizeLog2 -
                  Bitmap::kBytesPerCellLog2;
      r0 = WordShr(object, IntPtrConstant(shift));
      r0 = WordAnd(r0, IntPtrConstant((kPageAlignmentMask >> shift) &
                                      ~(Bitmap::kBytesPerCell - 1)));
      *cell = IntPtrAdd(bitmap, Signed(r0));
    }
    {
      // Temp variable to calculate bit offset in cell.
      TNode<WordT> r1;
      r1 = WordShr(object, IntPtrConstant(kTaggedSizeLog2));
      r1 = WordAnd(r1, IntPtrConstant((1 << Bitmap::kBitsPerCellLog2) - 1));
      // It seems that LSB(e.g. cl) is automatically used, so no manual masking
      // is needed. Uncomment the following line otherwise.
      // WordAnd(r1, IntPtrConstant((1 << kBitsPerByte) - 1)));
      *mask = WordShl(IntPtrConstant(1), r1);
    }
  }

  // True iff {mode} requests skipping the save/restore of FP registers.
  TNode<BoolT> ShouldSkipFPRegs(TNode<Smi> mode) {
    return TaggedEqual(mode, SmiConstant(kDontSaveFPRegs));
  }

  // True iff the stub was asked to emit a remembered-set entry.
  TNode<BoolT> ShouldEmitRememberSet(TNode<Smi> remembered_set) {
    return TaggedEqual(remembered_set, SmiConstant(EMIT_REMEMBERED_SET));
  }

  // Calls the two-argument C function {function} with caller-saved registers
  // preserved, choosing at runtime (via {mode}) whether FP registers are
  // saved as well, then jumps to {next}.
  template <typename Ret, typename Arg0, typename Arg1>
  void CallCFunction2WithCallerSavedRegistersMode(
      TNode<ExternalReference> function, TNode<Arg0> arg0, TNode<Arg1> arg1,
      TNode<Smi> mode, Label* next) {
    Label dont_save_fp(this), save_fp(this);
    Branch(ShouldSkipFPRegs(mode), &dont_save_fp, &save_fp);
    BIND(&dont_save_fp);
    {
      CallCFunctionWithCallerSavedRegisters(
          function, MachineTypeOf<Ret>::value, kDontSaveFPRegs,
          std::make_pair(MachineTypeOf<Arg0>::value, arg0),
          std::make_pair(MachineTypeOf<Arg1>::value, arg1));
      Goto(next);
    }

    BIND(&save_fp);
    {
      CallCFunctionWithCallerSavedRegisters(
          function, MachineTypeOf<Ret>::value, kSaveFPRegs,
          std::make_pair(MachineTypeOf<Arg0>::value, arg0),
          std::make_pair(MachineTypeOf<Arg1>::value, arg1));
      Goto(next);
    }
  }

  // Three-argument variant of the helper above.
  template <typename Ret, typename Arg0, typename Arg1, typename Arg2>
  void CallCFunction3WithCallerSavedRegistersMode(
      TNode<ExternalReference> function, TNode<Arg0> arg0, TNode<Arg1> arg1,
      TNode<Arg2> arg2, TNode<Smi> mode, Label* next) {
    Label dont_save_fp(this), save_fp(this);
    Branch(ShouldSkipFPRegs(mode), &dont_save_fp, &save_fp);
    BIND(&dont_save_fp);
    {
      CallCFunctionWithCallerSavedRegisters(
          function, MachineTypeOf<Ret>::value, kDontSaveFPRegs,
          std::make_pair(MachineTypeOf<Arg0>::value, arg0),
          std::make_pair(MachineTypeOf<Arg1>::value, arg1),
          std::make_pair(MachineTypeOf<Arg2>::value, arg2));
      Goto(next);
    }

    BIND(&save_fp);
    {
      CallCFunctionWithCallerSavedRegisters(
          function, MachineTypeOf<Ret>::value, kSaveFPRegs,
          std::make_pair(MachineTypeOf<Arg0>::value, arg0),
          std::make_pair(MachineTypeOf<Arg1>::value, arg1),
          std::make_pair(MachineTypeOf<Arg2>::value, arg2));
      Goto(next);
    }
  }

  // Slow path: records the slot via the insert_remembered_set C function,
  // then jumps to {next}.
  void InsertIntoRememberedSetAndGotoSlow(TNode<IntPtrT> object,
                                          TNode<IntPtrT> slot, TNode<Smi> mode,
                                          Label* next) {
    TNode<IntPtrT> page = PageFromAddress(object);
    TNode<ExternalReference> function =
        ExternalConstant(ExternalReference::insert_remembered_set_function());
    CallCFunction2WithCallerSavedRegistersMode<Int32T, IntPtrT, IntPtrT>(
        function, page, slot, mode, next);
  }

  // Fast path: sets the bit for {slot} directly in the page's old-to-new
  // SlotSet, falling back to the C function when the SlotSet or the bucket
  // has not been allocated yet. Jumps to {next} when done.
  void InsertIntoRememberedSetAndGoto(TNode<IntPtrT> object,
                                      TNode<IntPtrT> slot, TNode<Smi> mode,
                                      Label* next) {
    Label slow_path(this);
    TNode<IntPtrT> page = PageFromAddress(object);

    // Load address of SlotSet
    TNode<IntPtrT> slot_set = LoadSlotSet(page, &slow_path);
    TNode<IntPtrT> slot_offset = IntPtrSub(slot, page);

    // Load bucket
    TNode<IntPtrT> bucket = LoadBucket(slot_set, slot_offset, &slow_path);

    // Update cell
    SetBitInCell(bucket, slot_offset);

    Goto(next);

    BIND(&slow_path);
    InsertIntoRememberedSetAndGotoSlow(object, slot, mode, next);
  }

  // Loads the page's old-to-new SlotSet pointer; jumps to {slow_path} if it
  // is null (not allocated yet).
  TNode<IntPtrT> LoadSlotSet(TNode<IntPtrT> page, Label* slow_path) {
    TNode<IntPtrT> slot_set = UncheckedCast<IntPtrT>(
        Load(MachineType::Pointer(), page,
             IntPtrConstant(MemoryChunk::kOldToNewSlotSetOffset)));
    GotoIf(WordEqual(slot_set, IntPtrConstant(0)), slow_path);

    return slot_set;
  }

  // Loads the bucket covering {slot_offset} from {slot_set}; jumps to
  // {slow_path} if the bucket pointer is null.
  TNode<IntPtrT> LoadBucket(TNode<IntPtrT> slot_set, TNode<WordT> slot_offset,
                            Label* slow_path) {
    TNode<WordT> bucket_index =
        WordShr(slot_offset, SlotSet::kBitsPerBucketLog2 + kTaggedSizeLog2);
    TNode<IntPtrT> bucket = UncheckedCast<IntPtrT>(
        Load(MachineType::Pointer(), slot_set,
             WordShl(bucket_index, kSystemPointerSizeLog2)));
    GotoIf(WordEqual(bucket, IntPtrConstant(0)), slow_path);
    return bucket;
  }

  // Sets the bit for {slot_offset} in the appropriate 32-bit cell of
  // {bucket} using a plain (non-atomic) read-modify-write.
  void SetBitInCell(TNode<IntPtrT> bucket, TNode<WordT> slot_offset) {
    // Load cell value
    TNode<WordT> cell_offset = WordAnd(
        WordShr(slot_offset, SlotSet::kBitsPerCellLog2 + kTaggedSizeLog2 -
                                 SlotSet::kCellSizeBytesLog2),
        IntPtrConstant((SlotSet::kCellsPerBucket - 1)
                       << SlotSet::kCellSizeBytesLog2));
    TNode<IntPtrT> cell_address =
        UncheckedCast<IntPtrT>(IntPtrAdd(bucket, cell_offset));
    TNode<IntPtrT> old_cell_value =
        ChangeInt32ToIntPtr(Load<Int32T>(cell_address));

    // Calculate new cell value
    TNode<WordT> bit_index = WordAnd(WordShr(slot_offset, kTaggedSizeLog2),
                                     IntPtrConstant(SlotSet::kBitsPerCell - 1));
    TNode<IntPtrT> new_cell_value = UncheckedCast<IntPtrT>(
        WordOr(old_cell_value, WordShl(IntPtrConstant(1), bit_index)));

    // Update cell value
    StoreNoWriteBarrier(MachineRepresentation::kWord32, cell_address,
                        TruncateIntPtrToInt32(new_cell_value));
  }
};
404
// Out-of-line write barrier stub. Called after a store into {slot} of
// {object} (both passed as untagged words). Depending on {remembered_set}
// it records the slot in the old-to-new remembered set and/or forwards the
// store to the incremental-marking record-write C function. Always returns
// true and bumps the write_barriers counter.
TF_BUILTIN(RecordWrite, RecordWriteCodeStubAssembler) {
  Label generational_wb(this);
  Label incremental_wb(this);
  Label exit(this);

  TNode<Smi> remembered_set =
      UncheckedCast<Smi>(Parameter(Descriptor::kRememberedSet));
  Branch(ShouldEmitRememberSet(remembered_set), &generational_wb,
         &incremental_wb);

  BIND(&generational_wb);
  {
    Label test_old_to_young_flags(this);
    Label store_buffer_exit(this), store_buffer_incremental_wb(this);

    // When incremental marking is not on, we skip cross generation pointer
    // checking here, because there are checks for
    // `kPointersFromHereAreInterestingMask` and
    // `kPointersToHereAreInterestingMask` in
    // `src/compiler/<arch>/code-generator-<arch>.cc` before calling this stub,
    // which serves as the cross generation checking.
    TNode<IntPtrT> slot = UncheckedCast<IntPtrT>(Parameter(Descriptor::kSlot));
    Branch(IsMarking(), &test_old_to_young_flags, &store_buffer_exit);

    BIND(&test_old_to_young_flags);
    {
      // TODO(ishell): do a new-space range check instead.
      TNode<IntPtrT> value =
          BitcastTaggedToWord(Load(MachineType::TaggedPointer(), slot));

      // TODO(albertnetymk): Try to cache the page flag for value and object,
      // instead of calling IsPageFlagSet each time.
      TNode<BoolT> value_is_young =
          IsPageFlagSet(value, MemoryChunk::kIsInYoungGenerationMask);
      GotoIfNot(value_is_young, &incremental_wb);

      // Only an old->young store needs a remembered-set entry; for a
      // young-object store, go straight to the incremental barrier.
      TNode<IntPtrT> object =
          BitcastTaggedToWord(Parameter(Descriptor::kObject));
      TNode<BoolT> object_is_young =
          IsPageFlagSet(object, MemoryChunk::kIsInYoungGenerationMask);
      Branch(object_is_young, &incremental_wb, &store_buffer_incremental_wb);
    }

    BIND(&store_buffer_exit);
    {
      // Not marking: just record the slot and exit.
      TNode<Smi> fp_mode = UncheckedCast<Smi>(Parameter(Descriptor::kFPMode));
      TNode<IntPtrT> object =
          BitcastTaggedToWord(Parameter(Descriptor::kObject));
      InsertIntoRememberedSetAndGoto(object, slot, fp_mode, &exit);
    }

    BIND(&store_buffer_incremental_wb);
    {
      // Record the slot, then continue with the incremental barrier.
      TNode<Smi> fp_mode = UncheckedCast<Smi>(Parameter(Descriptor::kFPMode));
      TNode<IntPtrT> object =
          BitcastTaggedToWord(Parameter(Descriptor::kObject));
      InsertIntoRememberedSetAndGoto(object, slot, fp_mode, &incremental_wb);
    }
  }

  BIND(&incremental_wb);
  {
    Label call_incremental_wb(this);

    TNode<IntPtrT> slot = UncheckedCast<IntPtrT>(Parameter(Descriptor::kSlot));
    TNode<IntPtrT> value =
        BitcastTaggedToWord(Load(MachineType::TaggedPointer(), slot));

    // There are two cases we need to call incremental write barrier.
    // 1) value_is_white
    GotoIf(IsWhite(value), &call_incremental_wb);

    // 2) is_compacting && value_in_EC && obj_isnt_skip
    // is_compacting = true when is_marking = true
    GotoIfNot(IsPageFlagSet(value, MemoryChunk::kEvacuationCandidateMask),
              &exit);

    TNode<IntPtrT> object = BitcastTaggedToWord(Parameter(Descriptor::kObject));
    Branch(
        IsPageFlagSet(object, MemoryChunk::kSkipEvacuationSlotsRecordingMask),
        &exit, &call_incremental_wb);

    BIND(&call_incremental_wb);
    {
      TNode<ExternalReference> function = ExternalConstant(
          ExternalReference::incremental_marking_record_write_function());
      TNode<ExternalReference> isolate_constant =
          ExternalConstant(ExternalReference::isolate_address(isolate()));
      TNode<Smi> fp_mode = UncheckedCast<Smi>(Parameter(Descriptor::kFPMode));
      TNode<IntPtrT> object =
          BitcastTaggedToWord(Parameter(Descriptor::kObject));
      CallCFunction3WithCallerSavedRegistersMode<Int32T, IntPtrT, IntPtrT,
                                                 ExternalReference>(
          function, object, slot, isolate_constant, fp_mode, &exit);
    }
  }

  BIND(&exit);
  IncrementCounter(isolate()->counters()->write_barriers(), 1);
  Return(TrueConstant());
}
506
// Write-barrier stub for stores of ephemeron keys: unconditionally forwards
// {object} and the slot {address} to the C++
// ephemeron_key_write_barrier_function (with caller-saved registers
// preserved, honoring {fp_mode}), bumps the write_barriers counter, and
// returns true.
TF_BUILTIN(EphemeronKeyBarrier, RecordWriteCodeStubAssembler) {
  Label exit(this);

  TNode<ExternalReference> function = ExternalConstant(
      ExternalReference::ephemeron_key_write_barrier_function());
  TNode<ExternalReference> isolate_constant =
      ExternalConstant(ExternalReference::isolate_address(isolate()));
  TNode<IntPtrT> address =
      UncheckedCast<IntPtrT>(Parameter(Descriptor::kSlotAddress));
  TNode<IntPtrT> object = BitcastTaggedToWord(Parameter(Descriptor::kObject));
  TNode<Smi> fp_mode = UncheckedCast<Smi>(Parameter(Descriptor::kFPMode));
  CallCFunction3WithCallerSavedRegistersMode<Int32T, IntPtrT, IntPtrT,
                                             ExternalReference>(
      function, object, address, isolate_constant, fp_mode, &exit);

  BIND(&exit);
  IncrementCounter(isolate()->counters()->write_barriers(), 1);
  Return(TrueConstant());
}
526
// Assembler helper for the DeleteProperty builtin below.
class DeletePropertyBaseAssembler : public AccessorAssembler {
 public:
  explicit DeletePropertyBaseAssembler(compiler::CodeAssemblerState* state)
      : AccessorAssembler(state) {}

  // Removes {name} from {receiver}'s dictionary-mode {properties}.
  // Jumps to {dont_delete} if the entry is marked DONT_DELETE and to
  // {notfound} if there is no entry for {name}. Otherwise clears the entry,
  // updates the dictionary's bookkeeping, shrinks the dictionary via the
  // runtime when it becomes sparse enough, and returns true from the
  // generated builtin.
  void DeleteDictionaryProperty(TNode<Object> receiver,
                                TNode<NameDictionary> properties,
                                TNode<Name> name, TNode<Context> context,
                                Label* dont_delete, Label* notfound) {
    TVARIABLE(IntPtrT, var_name_index);
    Label dictionary_found(this, &var_name_index);
    NameDictionaryLookup<NameDictionary>(properties, name, &dictionary_found,
                                         &var_name_index, notfound);

    BIND(&dictionary_found);
    TNode<IntPtrT> key_index = var_name_index.value();
    TNode<Uint32T> details = LoadDetailsByKeyIndex(properties, key_index);
    GotoIf(IsSetWord32(details, PropertyDetails::kAttributesDontDeleteMask),
           dont_delete);
    // Overwrite the entry itself (see NameDictionary::SetEntry).
    TNode<Oddball> filler = TheHoleConstant();
    DCHECK(RootsTable::IsImmortalImmovable(RootIndex::kTheHoleValue));
    StoreFixedArrayElement(properties, key_index, filler, SKIP_WRITE_BARRIER);
    StoreValueByKeyIndex<NameDictionary>(properties, key_index, filler,
                                         SKIP_WRITE_BARRIER);
    StoreDetailsByKeyIndex<NameDictionary>(properties, key_index,
                                           SmiConstant(0));

    // Update bookkeeping information (see NameDictionary::ElementRemoved).
    TNode<Smi> nof = GetNumberOfElements<NameDictionary>(properties);
    TNode<Smi> new_nof = SmiSub(nof, SmiConstant(1));
    SetNumberOfElements<NameDictionary>(properties, new_nof);
    TNode<Smi> num_deleted =
        GetNumberOfDeletedElements<NameDictionary>(properties);
    TNode<Smi> new_deleted = SmiAdd(num_deleted, SmiConstant(1));
    SetNumberOfDeletedElements<NameDictionary>(properties, new_deleted);

    // Shrink the dictionary if necessary (see NameDictionary::Shrink).
    Label shrinking_done(this);
    TNode<Smi> capacity = GetCapacity<NameDictionary>(properties);
    GotoIf(SmiGreaterThan(new_nof, SmiShr(capacity, 2)), &shrinking_done);
    GotoIf(SmiLessThan(new_nof, SmiConstant(16)), &shrinking_done);
    CallRuntime(Runtime::kShrinkPropertyDictionary, context, receiver);
    Goto(&shrinking_done);
    BIND(&shrinking_done);

    Return(TrueConstant());
  }
};
576
// Implements `delete receiver[key]` for the cases a stub can handle:
// dictionary-mode receivers with unique-name keys, and proxies (via
// ProxyDeleteProperty). Integer indices, fast-mode properties, Smi
// receivers, and custom-elements receivers all fall back to the
// DeleteProperty runtime function.
TF_BUILTIN(DeleteProperty, DeletePropertyBaseAssembler) {
  TNode<Object> receiver = CAST(Parameter(Descriptor::kObject));
  TNode<Object> key = CAST(Parameter(Descriptor::kKey));
  TNode<Smi> language_mode = CAST(Parameter(Descriptor::kLanguageMode));
  TNode<Context> context = CAST(Parameter(Descriptor::kContext));

  TVARIABLE(IntPtrT, var_index);
  TVARIABLE(Name, var_unique);
  Label if_index(this, &var_index), if_unique_name(this), if_notunique(this),
      if_notfound(this), slow(this), if_proxy(this);

  GotoIf(TaggedIsSmi(receiver), &slow);
  TNode<Map> receiver_map = LoadMap(CAST(receiver));
  TNode<Uint16T> instance_type = LoadMapInstanceType(receiver_map);
  GotoIf(InstanceTypeEqual(instance_type, JS_PROXY_TYPE), &if_proxy);
  GotoIf(IsCustomElementsReceiverInstanceType(instance_type), &slow);
  // Classify {key} as an integer index or a unique name.
  TryToName(key, &if_index, &var_index, &if_unique_name, &var_unique, &slow,
            &if_notunique);

  BIND(&if_index);
  {
    Comment("integer index");
    Goto(&slow);  // TODO(jkummerow): Implement more smarts here.
  }

  BIND(&if_unique_name);
  {
    Comment("key is unique name");
    CheckForAssociatedProtector(var_unique.value(), &slow);

    Label dictionary(this), dont_delete(this);
    GotoIf(IsDictionaryMap(receiver_map), &dictionary);

    // Fast properties need to clear recorded slots, which can only be done
    // in C++.
    Goto(&slow);

    BIND(&dictionary);
    {
      InvalidateValidityCellIfPrototype(receiver_map);

      TNode<NameDictionary> properties =
          CAST(LoadSlowProperties(CAST(receiver)));
      // Returns true on success; jumps to &dont_delete / &if_notfound
      // otherwise.
      DeleteDictionaryProperty(receiver, properties, var_unique.value(),
                               context, &dont_delete, &if_notfound);
    }

    BIND(&dont_delete);
    {
      // Non-configurable property: strict mode must throw (handled in the
      // runtime), sloppy mode returns false.
      STATIC_ASSERT(LanguageModeSize == 2);
      GotoIf(SmiNotEqual(language_mode, SmiConstant(LanguageMode::kSloppy)),
             &slow);
      Return(FalseConstant());
    }
  }

  BIND(&if_notunique);
  {
    // If the string was not found in the string table, then no object can
    // have a property with that name.
    TryInternalizeString(CAST(key), &if_index, &var_index, &if_unique_name,
                         &var_unique, &if_notfound, &slow);
  }

  BIND(&if_notfound);
  Return(TrueConstant());

  BIND(&if_proxy);
  {
    TNode<Name> name = CAST(CallBuiltin(Builtins::kToName, context, key));
    GotoIf(IsPrivateSymbol(name), &slow);
    TailCallBuiltin(Builtins::kProxyDeleteProperty, context, receiver, name,
                    language_mode);
  }

  BIND(&slow);
  {
    TailCallRuntime(Runtime::kDeleteProperty, context, receiver, key,
                    language_mode);
  }
}
658
659 namespace {
660
// Shared implementation behind the SetDataProperties and CopyDataProperties
// builtins below.
class SetOrCopyDataPropertiesAssembler : public CodeStubAssembler {
 public:
  explicit SetOrCopyDataPropertiesAssembler(compiler::CodeAssemblerState* state)
      : CodeStubAssembler(state) {}

 protected:
  // Copies all enumerable own properties of {source} onto {target}. When
  // {use_set} is true, properties are stored via SetProperty in strict mode;
  // otherwise via the SetPropertyInLiteral builtin. Bails out to
  // {if_runtime} for cases the stub cannot handle (sources with elements,
  // deprecated target maps, non-JSObject receivers). Returns undefined.
  TNode<Object> SetOrCopyDataProperties(TNode<Context> context,
                                        TNode<JSReceiver> target,
                                        TNode<Object> source, Label* if_runtime,
                                        bool use_set = true) {
    Label if_done(this), if_noelements(this),
        if_sourcenotjsobject(this, Label::kDeferred);

    // JSPrimitiveWrapper wrappers for numbers don't have any enumerable own
    // properties, so we can immediately skip the whole operation if {source} is
    // a Smi.
    GotoIf(TaggedIsSmi(source), &if_done);

    // Otherwise check if {source} is a proper JSObject, and if not, defer
    // to testing for non-empty strings below.
    TNode<Map> source_map = LoadMap(CAST(source));
    TNode<Uint16T> source_instance_type = LoadMapInstanceType(source_map);
    GotoIfNot(IsJSObjectInstanceType(source_instance_type),
              &if_sourcenotjsobject);

    // Sources with (non-empty) elements must be handled in the runtime.
    TNode<FixedArrayBase> source_elements = LoadElements(CAST(source));
    GotoIf(IsEmptyFixedArray(source_elements), &if_noelements);
    Branch(IsEmptySlowElementDictionary(source_elements), &if_noelements,
           if_runtime);

    BIND(&if_noelements);
    {
      // If the target is deprecated, the object will be updated on first store.
      // If the source for that store equals the target, this will invalidate
      // the cached representation of the source. Handle this case in runtime.
      TNode<Map> target_map = LoadMap(target);
      GotoIf(IsDeprecatedMap(target_map), if_runtime);

      if (use_set) {
        TNode<BoolT> target_is_simple_receiver = IsSimpleObjectMap(target_map);
        ForEachEnumerableOwnProperty(
            context, source_map, CAST(source), kEnumerationOrder,
            [=](TNode<Name> key, TNode<Object> value) {
              KeyedStoreGenericGenerator::SetProperty(
                  state(), context, target, target_is_simple_receiver, key,
                  value, LanguageMode::kStrict);
            },
            if_runtime);
      } else {
        ForEachEnumerableOwnProperty(
            context, source_map, CAST(source), kEnumerationOrder,
            [=](TNode<Name> key, TNode<Object> value) {
              CallBuiltin(Builtins::kSetPropertyInLiteral, context, target, key,
                          value);
            },
            if_runtime);
      }
      Goto(&if_done);
    }

    BIND(&if_sourcenotjsobject);
    {
      // Handle other JSReceivers in the runtime.
      GotoIf(IsJSReceiverInstanceType(source_instance_type), if_runtime);

      // Non-empty strings are the only non-JSReceivers that need to be
      // handled explicitly by Object.assign() and CopyDataProperties.
      GotoIfNot(IsStringInstanceType(source_instance_type), &if_done);
      TNode<IntPtrT> source_length = LoadStringLengthAsWord(CAST(source));
      Branch(IntPtrEqual(source_length, IntPtrConstant(0)), &if_done,
             if_runtime);
    }

    BIND(&if_done);
    return UndefinedConstant();
  }
};
738
739 } // namespace
740
741 // ES #sec-copydataproperties
TF_BUILTIN(CopyDataProperties,SetOrCopyDataPropertiesAssembler)742 TF_BUILTIN(CopyDataProperties, SetOrCopyDataPropertiesAssembler) {
743 TNode<JSObject> target = CAST(Parameter(Descriptor::kTarget));
744 TNode<Object> source = CAST(Parameter(Descriptor::kSource));
745 TNode<Context> context = CAST(Parameter(Descriptor::kContext));
746
747 CSA_ASSERT(this, TaggedNotEqual(target, source));
748
749 Label if_runtime(this, Label::kDeferred);
750 Return(SetOrCopyDataProperties(context, target, source, &if_runtime, false));
751
752 BIND(&if_runtime);
753 TailCallRuntime(Runtime::kCopyDataProperties, context, target, source);
754 }
755
// Copies enumerable own properties of {source} onto {target} using ordinary
// strict-mode SetProperty semantics (use_set == true), deferring to the
// SetDataProperties runtime function for anything the stub cannot handle.
TF_BUILTIN(SetDataProperties, SetOrCopyDataPropertiesAssembler) {
  TNode<JSReceiver> destination = CAST(Parameter(Descriptor::kTarget));
  TNode<Object> props_source = CAST(Parameter(Descriptor::kSource));
  TNode<Context> ctx = CAST(Parameter(Descriptor::kContext));

  Label bail_to_runtime(this, Label::kDeferred);
  Return(SetOrCopyDataProperties(ctx, destination, props_source,
                                 &bail_to_runtime, true));

  BIND(&bail_to_runtime);
  TailCallRuntime(Runtime::kSetDataProperties, ctx, destination, props_source);
}
767
// Prepares a for-in enumeration of {receiver}: returns the receiver's map
// when the enum cache is usable, the empty FixedArray when there is nothing
// to enumerate, and otherwise defers to the ForInEnumerate runtime function.
TF_BUILTIN(ForInEnumerate, CodeStubAssembler) {
  TNode<JSReceiver> enum_receiver = CAST(Parameter(Descriptor::kReceiver));
  TNode<Context> ctx = CAST(Parameter(Descriptor::kContext));

  Label nothing_to_enumerate(this), call_runtime(this, Label::kDeferred);
  TNode<Map> cached_map =
      CheckEnumCache(enum_receiver, &nothing_to_enumerate, &call_runtime);
  Return(cached_map);

  BIND(&nothing_to_enumerate);
  Return(EmptyFixedArrayConstant());

  BIND(&call_runtime);
  TailCallRuntime(Runtime::kForInEnumerate, ctx, enum_receiver);
}
782
// Filter step of for-in iteration: returns {key} if {object} still has a
// property with that name, and undefined otherwise (so the loop skips it).
TF_BUILTIN(ForInFilter, CodeStubAssembler) {
  TNode<String> candidate_key = CAST(Parameter(Descriptor::kKey));
  TNode<HeapObject> holder = CAST(Parameter(Descriptor::kObject));
  TNode<Context> ctx = CAST(Parameter(Descriptor::kContext));

  Label keep(this), filter_out(this);
  TNode<Oddball> has_property =
      HasProperty(ctx, holder, candidate_key, kForInHasProperty);
  Branch(IsTrue(has_property), &keep, &filter_out);

  BIND(&keep);
  Return(candidate_key);

  BIND(&filter_out);
  Return(UndefinedConstant());
}
798
// SameValue(left, right) as a builtin: like strict equality except that NaN
// compares equal to itself and +0 is distinguished from -0 (delegated to
// BranchIfSameValue).
TF_BUILTIN(SameValue, CodeStubAssembler) {
  TNode<Object> left = CAST(Parameter(Descriptor::kLeft));
  TNode<Object> right = CAST(Parameter(Descriptor::kRight));

  Label return_true(this), return_false(this);
  BranchIfSameValue(left, right, &return_true, &return_false);

  BIND(&return_true);
  Return(TrueConstant());

  BIND(&return_false);
  Return(FalseConstant());
}
812
// SameValue restricted to the numbers-only mode of BranchIfSameValue.
TF_BUILTIN(SameValueNumbersOnly, CodeStubAssembler) {
  TNode<Object> left = CAST(Parameter(Descriptor::kLeft));
  TNode<Object> right = CAST(Parameter(Descriptor::kRight));

  Label return_true(this), return_false(this);
  BranchIfSameValue(left, right, &return_true, &return_false,
                    SameValueMode::kNumbersOnly);

  BIND(&return_false);
  Return(FalseConstant());

  BIND(&return_true);
  Return(TrueConstant());
}
826
// Adaptor that invokes a C++ builtin through CEntry with a builtin exit
// frame, pushing argc/target/new_target as extra stack arguments so stack
// frame iterators can reconstruct the builtin call (see the TailCallStub
// comment below).
TF_BUILTIN(AdaptorWithBuiltinExitFrame, CodeStubAssembler) {
  TNode<JSFunction> target = CAST(Parameter(Descriptor::kTarget));
  TNode<Object> new_target = CAST(Parameter(Descriptor::kNewTarget));
  // Raw address of the C++ builtin function CEntry will call.
  TNode<WordT> c_function =
      UncheckedCast<WordT>(Parameter(Descriptor::kCFunction));

  // The logic contained here is mirrored for TurboFan inlining in
  // JSTypedLowering::ReduceJSCall{Function,Construct}. Keep these in sync.

  // Make sure we operate in the context of the called function (for example
  // ConstructStubs implemented in C++ will be run in the context of the caller
  // instead of the callee, due to the way that [[Construct]] is defined for
  // ordinary functions).
  TNode<Context> context =
      CAST(LoadObjectField(target, JSFunction::kContextOffset));

  // Update arguments count for CEntry to contain the number of arguments
  // including the receiver and the extra arguments.
  TNode<Int32T> argc =
      UncheckedCast<Int32T>(Parameter(Descriptor::kActualArgumentsCount));
  argc = Int32Add(
      argc,
      Int32Constant(BuiltinExitFrameConstants::kNumExtraArgsWithReceiver));

  const bool builtin_exit_frame = true;
  // CEntry variant: 1 return value, no FP-register saving, argv on the stack.
  TNode<Code> code = HeapConstant(CodeFactory::CEntry(
      isolate(), 1, kDontSaveFPRegs, kArgvOnStack, builtin_exit_frame));

  // Unconditionally push argc, target and new target as extra stack arguments.
  // They will be used by stack frame iterators when constructing stack trace.
  TailCallStub(CEntry1ArgvOnStackDescriptor{},  // descriptor
               code, context,       // standard arguments for TailCallStub
               argc, c_function,    // register arguments
               TheHoleConstant(),   // additional stack argument 1 (padding)
               SmiFromInt32(argc),  // additional stack argument 2
               target,              // additional stack argument 3
               new_target);         // additional stack argument 4
}
865
// Tail-calls Runtime::kAllocateInYoungGeneration with large-object
// allocation allowed. The requested size is checked to fit a positive Smi
// because it is passed to the runtime as a tagged value.
TF_BUILTIN(AllocateInYoungGeneration, CodeStubAssembler) {
  TNode<IntPtrT> requested_size =
      UncheckedCast<IntPtrT>(Parameter(Descriptor::kRequestedSize));
  CSA_CHECK(this, IsValidPositiveSmi(requested_size));

  // Renamed from |allocation_flags| to |runtime_flags| for consistency with
  // the old-generation allocation builtins, which encode the same flag bits.
  TNode<Smi> runtime_flags =
      SmiConstant(Smi::FromInt(AllocateDoubleAlignFlag::encode(false) |
                               AllowLargeObjectAllocationFlag::encode(true)));
  TailCallRuntime(Runtime::kAllocateInYoungGeneration, NoContextConstant(),
                  SmiFromIntPtr(requested_size), runtime_flags);
}
877
// Tail-calls Runtime::kAllocateInYoungGeneration with large-object
// allocation disallowed ("regular" allocation). The requested size must fit
// a positive Smi since it is passed to the runtime as a tagged value.
TF_BUILTIN(AllocateRegularInYoungGeneration, CodeStubAssembler) {
  TNode<IntPtrT> requested_size =
      UncheckedCast<IntPtrT>(Parameter(Descriptor::kRequestedSize));
  CSA_CHECK(this, IsValidPositiveSmi(requested_size));

  // Renamed from |allocation_flags| to |runtime_flags| for consistency with
  // the old-generation allocation builtins, which encode the same flag bits.
  TNode<Smi> runtime_flags =
      SmiConstant(Smi::FromInt(AllocateDoubleAlignFlag::encode(false) |
                               AllowLargeObjectAllocationFlag::encode(false)));
  TailCallRuntime(Runtime::kAllocateInYoungGeneration, NoContextConstant(),
                  SmiFromIntPtr(requested_size), runtime_flags);
}
889
// Tail-calls Runtime::kAllocateInOldGeneration; large-object allocation is
// permitted. The size must be a valid positive Smi as it is passed tagged.
TF_BUILTIN(AllocateInOldGeneration, CodeStubAssembler) {
  TNode<IntPtrT> size =
      UncheckedCast<IntPtrT>(Parameter(Descriptor::kRequestedSize));
  CSA_CHECK(this, IsValidPositiveSmi(size));

  // Flag bits are computed host-side; only the resulting Smi is embedded.
  const int flag_bits = AllocateDoubleAlignFlag::encode(false) |
                        AllowLargeObjectAllocationFlag::encode(true);
  TailCallRuntime(Runtime::kAllocateInOldGeneration, NoContextConstant(),
                  SmiFromIntPtr(size), SmiConstant(Smi::FromInt(flag_bits)));
}
901
// Tail-calls Runtime::kAllocateInOldGeneration; large-object allocation is
// NOT permitted ("regular" allocation). The size must be a valid positive
// Smi as it is passed tagged.
TF_BUILTIN(AllocateRegularInOldGeneration, CodeStubAssembler) {
  TNode<IntPtrT> size =
      UncheckedCast<IntPtrT>(Parameter(Descriptor::kRequestedSize));
  CSA_CHECK(this, IsValidPositiveSmi(size));

  // Flag bits are computed host-side; only the resulting Smi is embedded.
  const int flag_bits = AllocateDoubleAlignFlag::encode(false) |
                        AllowLargeObjectAllocationFlag::encode(false);
  TailCallRuntime(Runtime::kAllocateInOldGeneration, NoContextConstant(),
                  SmiFromIntPtr(size), SmiConstant(Smi::FromInt(flag_bits)));
}
913
// Aborts execution via Runtime::kAbort; the parameter is a Smi message id.
TF_BUILTIN(Abort, CodeStubAssembler) {
  TNode<Smi> message_id = CAST(Parameter(Descriptor::kMessageOrMessageId));
  TailCallRuntime(Runtime::kAbort, NoContextConstant(), message_id);
}
918
// Aborts execution via Runtime::kAbortCSAAssert; unlike Abort above, the
// parameter is a full message String rather than a Smi id.
TF_BUILTIN(AbortCSAAssert, CodeStubAssembler) {
  TNode<String> message = CAST(Parameter(Descriptor::kMessageOrMessageId));
  TailCallRuntime(Runtime::kAbortCSAAssert, NoContextConstant(), message);
}
923
// CEntry variant: 1 return value, FP regs not saved, argv on stack,
// no builtin exit frame.
void Builtins::Generate_CEntry_Return1_DontSaveFPRegs_ArgvOnStack_NoBuiltinExit(
    MacroAssembler* masm) {
  Generate_CEntry(masm, 1, kDontSaveFPRegs, kArgvOnStack, false);
}
928
// CEntry variant: 1 return value, FP regs not saved, argv on stack,
// builtin exit frame.
void Builtins::Generate_CEntry_Return1_DontSaveFPRegs_ArgvOnStack_BuiltinExit(
    MacroAssembler* masm) {
  Generate_CEntry(masm, 1, kDontSaveFPRegs, kArgvOnStack, true);
}
933
// CEntry variant: 1 return value, FP regs not saved, argv in a register,
// no builtin exit frame.
void Builtins::
    Generate_CEntry_Return1_DontSaveFPRegs_ArgvInRegister_NoBuiltinExit(
        MacroAssembler* masm) {
  Generate_CEntry(masm, 1, kDontSaveFPRegs, kArgvInRegister, false);
}
939
// CEntry variant: 1 return value, FP regs saved, argv on stack,
// no builtin exit frame.
void Builtins::Generate_CEntry_Return1_SaveFPRegs_ArgvOnStack_NoBuiltinExit(
    MacroAssembler* masm) {
  Generate_CEntry(masm, 1, kSaveFPRegs, kArgvOnStack, false);
}
944
// CEntry variant: 1 return value, FP regs saved, argv on stack,
// builtin exit frame.
void Builtins::Generate_CEntry_Return1_SaveFPRegs_ArgvOnStack_BuiltinExit(
    MacroAssembler* masm) {
  Generate_CEntry(masm, 1, kSaveFPRegs, kArgvOnStack, true);
}
949
// CEntry variant: 2 return values, FP regs not saved, argv on stack,
// no builtin exit frame.
void Builtins::Generate_CEntry_Return2_DontSaveFPRegs_ArgvOnStack_NoBuiltinExit(
    MacroAssembler* masm) {
  Generate_CEntry(masm, 2, kDontSaveFPRegs, kArgvOnStack, false);
}
954
// CEntry variant: 2 return values, FP regs not saved, argv on stack,
// builtin exit frame.
void Builtins::Generate_CEntry_Return2_DontSaveFPRegs_ArgvOnStack_BuiltinExit(
    MacroAssembler* masm) {
  Generate_CEntry(masm, 2, kDontSaveFPRegs, kArgvOnStack, true);
}
959
// CEntry variant: 2 return values, FP regs not saved, argv in a register,
// no builtin exit frame.
void Builtins::
    Generate_CEntry_Return2_DontSaveFPRegs_ArgvInRegister_NoBuiltinExit(
        MacroAssembler* masm) {
  Generate_CEntry(masm, 2, kDontSaveFPRegs, kArgvInRegister, false);
}
965
// CEntry variant: 2 return values, FP regs saved, argv on stack,
// no builtin exit frame.
void Builtins::Generate_CEntry_Return2_SaveFPRegs_ArgvOnStack_NoBuiltinExit(
    MacroAssembler* masm) {
  Generate_CEntry(masm, 2, kSaveFPRegs, kArgvOnStack, false);
}
970
// CEntry variant: 2 return values, FP regs saved, argv on stack,
// builtin exit frame.
void Builtins::Generate_CEntry_Return2_SaveFPRegs_ArgvOnStack_BuiltinExit(
    MacroAssembler* masm) {
  Generate_CEntry(masm, 2, kSaveFPRegs, kArgvOnStack, true);
}
975
976 #if !defined(V8_TARGET_ARCH_ARM) && !defined(V8_TARGET_ARCH_MIPS)
// Placeholder for architectures without a specialized MemCopyUint8Uint8
// builtin (only ARM/MIPS provide one, per the surrounding #if); invoking it
// is an error, hence the call to the Illegal builtin.
void Builtins::Generate_MemCopyUint8Uint8(MacroAssembler* masm) {
  masm->Call(BUILTIN_CODE(masm->isolate(), Illegal), RelocInfo::CODE_TARGET);
}
980 #endif // !defined(V8_TARGET_ARCH_ARM) && !defined(V8_TARGET_ARCH_MIPS)
981
982 #ifndef V8_TARGET_ARCH_IA32
// Placeholder for architectures without a specialized MemMove builtin (only
// IA32 provides one, per the surrounding #ifndef); invoking it is an error,
// hence the call to the Illegal builtin.
void Builtins::Generate_MemMove(MacroAssembler* masm) {
  masm->Call(BUILTIN_CODE(masm->isolate(), Illegal), RelocInfo::CODE_TARGET);
}
986 #endif // V8_TARGET_ARCH_IA32
987
988 // ES6 [[Get]] operation.
// Walks the prototype chain of {object} looking for {key}: returns the value
// when found, undefined when absent, dispatches proxies to the
// ProxyGetProperty builtin, and otherwise falls back to Runtime::kGetProperty.
TF_BUILTIN(GetProperty, CodeStubAssembler) {
  TNode<Object> object = CAST(Parameter(Descriptor::kObject));
  TNode<Object> key = CAST(Parameter(Descriptor::kKey));
  TNode<Context> context = CAST(Parameter(Descriptor::kContext));
  // TODO(duongn): consider tailcalling to GetPropertyWithReceiver(object,
  // object, key, OnNonExistent::kReturnUndefined).
  Label if_notfound(this), if_proxy(this, Label::kDeferred),
      if_slow(this, Label::kDeferred);

  // Invoked per holder on the chain: attempts to load an own named property
  // and returns it directly when found.
  CodeStubAssembler::LookupPropertyInHolder lookup_property_in_holder =
      [=](TNode<HeapObject> receiver, TNode<HeapObject> holder,
          TNode<Map> holder_map, TNode<Int32T> holder_instance_type,
          TNode<Name> unique_name, Label* next_holder, Label* if_bailout) {
        TVARIABLE(Object, var_value);
        Label if_found(this);
        TryGetOwnProperty(context, receiver, CAST(holder), holder_map,
                          holder_instance_type, unique_name, &if_found,
                          &var_value, next_holder, if_bailout);
        BIND(&if_found);
        Return(var_value.value());
      };

  // Indexed (element) lookup is not implemented on this fast path; always
  // bail out so the slow path handles it.
  CodeStubAssembler::LookupElementInHolder lookup_element_in_holder =
      [=](TNode<HeapObject> receiver, TNode<HeapObject> holder,
          TNode<Map> holder_map, TNode<Int32T> holder_instance_type,
          TNode<IntPtrT> index, Label* next_holder, Label* if_bailout) {
        // Not supported yet.
        Use(next_holder);
        Goto(if_bailout);
      };

  // {object} serves as both receiver and lookup start.
  TryPrototypeChainLookup(object, object, key, lookup_property_in_holder,
                          lookup_element_in_holder, &if_notfound, &if_slow,
                          &if_proxy);

  BIND(&if_notfound);
  Return(UndefinedConstant());

  BIND(&if_slow);
  TailCallRuntime(Runtime::kGetProperty, context, object, key);

  BIND(&if_proxy);
  {
    // Convert the {key} to a Name first.
    TNode<Object> name = CallBuiltin(Builtins::kToName, context, key);

    // The {object} is a JSProxy instance, look up the {name} on it, passing
    // {object} both as receiver and holder. If {name} is absent we can safely
    // return undefined from here.
    TailCallBuiltin(Builtins::kProxyGetProperty, context, object, name, object,
                    SmiConstant(OnNonExistent::kReturnUndefined));
  }
}
1042
1043 // ES6 [[Get]] operation with Receiver.
// Like GetProperty above, but with an explicit {receiver} (distinct from the
// lookup-start {object}) and an {on_non_existent} mode that selects between
// returning undefined and throwing a ReferenceError when the key is absent.
TF_BUILTIN(GetPropertyWithReceiver, CodeStubAssembler) {
  TNode<Object> object = CAST(Parameter(Descriptor::kObject));
  TNode<Object> key = CAST(Parameter(Descriptor::kKey));
  TNode<Context> context = CAST(Parameter(Descriptor::kContext));
  TNode<Object> receiver = CAST(Parameter(Descriptor::kReceiver));
  // Smi-encoded OnNonExistent mode; see the if_notfound handling below.
  TNode<Object> on_non_existent = CAST(Parameter(Descriptor::kOnNonExistent));
  Label if_notfound(this), if_proxy(this, Label::kDeferred),
      if_slow(this, Label::kDeferred);

  // Invoked per holder on the chain: attempts to load an own named property
  // and returns it directly when found.
  CodeStubAssembler::LookupPropertyInHolder lookup_property_in_holder =
      [=](TNode<HeapObject> receiver, TNode<HeapObject> holder,
          TNode<Map> holder_map, TNode<Int32T> holder_instance_type,
          TNode<Name> unique_name, Label* next_holder, Label* if_bailout) {
        TVARIABLE(Object, var_value);
        Label if_found(this);
        TryGetOwnProperty(context, receiver, CAST(holder), holder_map,
                          holder_instance_type, unique_name, &if_found,
                          &var_value, next_holder, if_bailout);
        BIND(&if_found);
        Return(var_value.value());
      };

  // Indexed (element) lookup is not implemented on this fast path; always
  // bail out so the slow path handles it.
  CodeStubAssembler::LookupElementInHolder lookup_element_in_holder =
      [=](TNode<HeapObject> receiver, TNode<HeapObject> holder,
          TNode<Map> holder_map, TNode<Int32T> holder_instance_type,
          TNode<IntPtrT> index, Label* next_holder, Label* if_bailout) {
        // Not supported yet.
        Use(next_holder);
        Goto(if_bailout);
      };

  // Note: lookup starts at {object} but property access uses {receiver}.
  TryPrototypeChainLookup(receiver, object, key, lookup_property_in_holder,
                          lookup_element_in_holder, &if_notfound, &if_slow,
                          &if_proxy);

  BIND(&if_notfound);
  Label throw_reference_error(this);
  GotoIf(TaggedEqual(on_non_existent,
                     SmiConstant(OnNonExistent::kThrowReferenceError)),
         &throw_reference_error);
  // Only two modes exist; anything else is a caller bug.
  CSA_ASSERT(this, TaggedEqual(on_non_existent,
                               SmiConstant(OnNonExistent::kReturnUndefined)));
  Return(UndefinedConstant());

  BIND(&throw_reference_error);
  Return(CallRuntime(Runtime::kThrowReferenceError, context, key));

  BIND(&if_slow);
  TailCallRuntime(Runtime::kGetPropertyWithReceiver, context, object, key,
                  receiver, on_non_existent);

  BIND(&if_proxy);
  {
    // Convert the {key} to a Name first.
    TNode<Name> name = CAST(CallBuiltin(Builtins::kToName, context, key));

    // Proxy cannot handle private symbol so bailout.
    GotoIf(IsPrivateSymbol(name), &if_slow);

    // The {object} is a JSProxy instance, look up the {name} on it, passing
    // {object} both as receiver and holder. If {name} is absent we can safely
    // return undefined from here.
    TailCallBuiltin(Builtins::kProxyGetProperty, context, object, name,
                    receiver, on_non_existent);
  }
}
1110
1111 // ES6 [[Set]] operation.
// Strict-mode property store, delegated to the generic keyed-store machinery.
TF_BUILTIN(SetProperty, CodeStubAssembler) {
  TNode<Context> context = CAST(Parameter(Descriptor::kContext));
  TNode<Object> object = CAST(Parameter(Descriptor::kReceiver));
  TNode<Object> name = CAST(Parameter(Descriptor::kKey));
  TNode<Object> value = CAST(Parameter(Descriptor::kValue));

  KeyedStoreGenericGenerator::SetProperty(state(), context, object, name,
                                          value, LanguageMode::kStrict);
}
1121
1122 // ES6 CreateDataProperty(), specialized for the case where objects are still
1123 // being initialized, and have not yet been made accessible to the user. Thus,
1124 // any operation here should be unobservable until after the object has been
1125 // returned.
// Property store for objects under construction (literals), delegated to the
// specialized in-literal path of the generic keyed-store machinery.
TF_BUILTIN(SetPropertyInLiteral, CodeStubAssembler) {
  TNode<Context> context = CAST(Parameter(Descriptor::kContext));
  TNode<JSObject> object = CAST(Parameter(Descriptor::kReceiver));
  TNode<Object> name = CAST(Parameter(Descriptor::kKey));
  TNode<Object> value = CAST(Parameter(Descriptor::kValue));

  KeyedStoreGenericGenerator::SetPropertyInLiteral(state(), context, object,
                                                   name, value);
}
1135
// Attempts to instantiate an asm.js module via the runtime, reading the
// optional (stdlib, foreign, heap) arguments from the caller's frame. On
// failure the runtime returns Smi zero and we tail-call the function itself,
// which by then has been reset to the lazy-compile builtin, so execution
// falls back to regular JavaScript.
TF_BUILTIN(InstantiateAsmJs, CodeStubAssembler) {
  Label tailcall_to_function(this);
  TNode<Context> context = CAST(Parameter(Descriptor::kContext));
  TNode<Object> new_target = CAST(Parameter(Descriptor::kNewTarget));
  TNode<Int32T> arg_count =
      UncheckedCast<Int32T>(Parameter(Descriptor::kActualArgumentsCount));
  TNode<JSFunction> function = CAST(Parameter(Descriptor::kTarget));

  // Retrieve arguments from caller (stdlib, foreign, heap).
  CodeStubArguments args(this, arg_count);
  TNode<Object> stdlib = args.GetOptionalArgumentValue(0);
  TNode<Object> foreign = args.GetOptionalArgumentValue(1);
  TNode<Object> heap = args.GetOptionalArgumentValue(2);

  // Call runtime, on success just pass the result to the caller and pop all
  // arguments. A smi 0 is returned on failure, an object on success.
  TNode<Object> maybe_result_or_smi_zero = CallRuntime(
      Runtime::kInstantiateAsmJs, context, function, stdlib, foreign, heap);
  GotoIf(TaggedIsSmi(maybe_result_or_smi_zero), &tailcall_to_function);
  args.PopAndReturn(maybe_result_or_smi_zero);

  BIND(&tailcall_to_function);
  // On failure, tail call back to regular JavaScript by re-calling the given
  // function which has been reset to the compile lazy builtin.
  TNode<Code> code = CAST(LoadObjectField(function, JSFunction::kCodeOffset));
  TailCallJSCode(code, context, function, new_target, arg_count);
}
1163
1164 } // namespace internal
1165 } // namespace v8
1166