1 // Copyright 2017 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "src/api/api.h"
6 #include "src/baseline/baseline.h"
7 #include "src/builtins/builtins-utils-gen.h"
8 #include "src/builtins/builtins.h"
9 #include "src/codegen/code-stub-assembler.h"
10 #include "src/codegen/interface-descriptors-inl.h"
11 #include "src/codegen/macro-assembler.h"
12 #include "src/common/globals.h"
13 #include "src/execution/frame-constants.h"
14 #include "src/heap/memory-chunk.h"
15 #include "src/ic/accessor-assembler.h"
16 #include "src/ic/keyed-store-generic.h"
17 #include "src/logging/counters.h"
18 #include "src/objects/debug-objects.h"
19 #include "src/objects/shared-function-info.h"
20 #include "src/runtime/runtime.h"
21
22 namespace v8 {
23 namespace internal {
24
25 // -----------------------------------------------------------------------------
26 // Stack checks.
27
// Stack-check builtin: simply delegates to the StackGuard runtime function
// via a tail call, so no frame is built here.
void Builtins::Generate_StackCheck(MacroAssembler* masm) {
  masm->TailCallRuntime(Runtime::kStackGuard);
}
31
32 // -----------------------------------------------------------------------------
33 // TurboFan support builtins.
34
// Replaces {object}'s elements backing store with a fresh copy of itself and
// returns the copy. Used for Smi/tagged-object elements kinds (the clone is
// extracted with ExtractFixedArrayFlag::kFixedArrays).
TF_BUILTIN(CopyFastSmiOrObjectElements, CodeStubAssembler) {
  auto js_object = Parameter<JSObject>(Descriptor::kObject);

  // Load the {object}'s elements.
  TNode<FixedArrayBase> source =
      CAST(LoadObjectField(js_object, JSObject::kElementsOffset));
  TNode<FixedArrayBase> target =
      CloneFixedArray(source, ExtractFixedArrayFlag::kFixedArrays);
  // Install the copy on the object and hand it back to the caller.
  StoreObjectField(js_object, JSObject::kElementsOffset, target);
  Return(target);
}
46
// Grows {object}'s PACKED_DOUBLE_ELEMENTS backing store so that index {key}
// fits, returning the (possibly reallocated) elements array. Falls back to
// Runtime::kGrowArrayElements when the inline growth path bails out.
TF_BUILTIN(GrowFastDoubleElements, CodeStubAssembler) {
  auto object = Parameter<JSObject>(Descriptor::kObject);
  auto key = Parameter<Smi>(Descriptor::kKey);

  Label runtime(this, Label::kDeferred);
  TNode<FixedArrayBase> elements = LoadElements(object);
  elements = TryGrowElementsCapacity(object, elements, PACKED_DOUBLE_ELEMENTS,
                                     key, &runtime);
  Return(elements);

  BIND(&runtime);
  // No context is needed; the runtime call is entered with NoContextConstant.
  TailCallRuntime(Runtime::kGrowArrayElements, NoContextConstant(), object,
                  key);
}
61
// Same as GrowFastDoubleElements but for PACKED_ELEMENTS (Smi/tagged-object)
// backing stores: grows {object}'s elements so index {key} fits, or tail
// calls Runtime::kGrowArrayElements on failure.
TF_BUILTIN(GrowFastSmiOrObjectElements, CodeStubAssembler) {
  auto object = Parameter<JSObject>(Descriptor::kObject);
  auto key = Parameter<Smi>(Descriptor::kKey);

  Label runtime(this, Label::kDeferred);
  TNode<FixedArrayBase> elements = LoadElements(object);
  elements =
      TryGrowElementsCapacity(object, elements, PACKED_ELEMENTS, key, &runtime);
  Return(elements);

  BIND(&runtime);
  TailCallRuntime(Runtime::kGrowArrayElements, NoContextConstant(), object,
                  key);
}
76
// Trivial builtin: returns the receiver argument unchanged.
TF_BUILTIN(ReturnReceiver, CodeStubAssembler) {
  Return(Parameter<Object>(Descriptor::kReceiver));
}
81
// Trampoline installed on functions while the debugger is active. If the
// function's DebugInfo has the kBreakAtEntry flag set, it first calls
// Runtime::kDebugBreakAtEntry; in all cases it then tail-calls the code
// object derived from the SharedFunctionInfo, preserving the original JS
// calling convention (context, new.target, argc).
TF_BUILTIN(DebugBreakTrampoline, CodeStubAssembler) {
  Label tailcall_to_shared(this);
  auto context = Parameter<Context>(Descriptor::kContext);
  auto new_target = Parameter<Object>(Descriptor::kJSNewTarget);
  auto arg_count =
      UncheckedParameter<Int32T>(Descriptor::kJSActualArgumentsCount);
  auto function = Parameter<JSFunction>(Descriptor::kJSTarget);

  // Check break-at-entry flag on the debug info.
  TNode<SharedFunctionInfo> shared =
      CAST(LoadObjectField(function, JSFunction::kSharedFunctionInfoOffset));
  // kScriptOrDebugInfoOffset may hold a Smi, a Script, or a DebugInfo; bail
  // to the plain tail call unless it is actually a DebugInfo.
  TNode<Object> maybe_heap_object_or_smi =
      LoadObjectField(shared, SharedFunctionInfo::kScriptOrDebugInfoOffset);
  TNode<HeapObject> maybe_debug_info =
      TaggedToHeapObject(maybe_heap_object_or_smi, &tailcall_to_shared);
  GotoIfNot(HasInstanceType(maybe_debug_info, InstanceType::DEBUG_INFO_TYPE),
            &tailcall_to_shared);

  {
    TNode<DebugInfo> debug_info = CAST(maybe_debug_info);
    TNode<Smi> flags =
        CAST(LoadObjectField(debug_info, DebugInfo::kFlagsOffset));
    // Skip the runtime call unless the kBreakAtEntry bit is set.
    GotoIfNot(SmiToInt32(SmiAnd(flags, SmiConstant(DebugInfo::kBreakAtEntry))),
              &tailcall_to_shared);

    CallRuntime(Runtime::kDebugBreakAtEntry, context, function);
    Goto(&tailcall_to_shared);
  }

  BIND(&tailcall_to_shared);
  // Tail call into code object on the SharedFunctionInfo.
  TNode<Code> code = GetSharedFunctionInfoCode(shared);
  TailCallJSCode(code, context, function, new_target, arg_count);
}
116
// Assembler for the RecordWrite / EphemeronKeyBarrier stubs. Implements the
// out-of-line part of the generational and incremental-marking write
// barriers: remembered-set insertion and calls into the C++ marking
// machinery. All object/slot values are handled as raw IntPtrT words
// (bitcast from tagged), since this code runs with a restricted register
// set and must not trigger GC.
class WriteBarrierCodeStubAssembler : public CodeStubAssembler {
 public:
  explicit WriteBarrierCodeStubAssembler(compiler::CodeAssemblerState* state)
      : CodeStubAssembler(state) {}

  // True iff the isolate's heap_is_marking flag byte is nonzero, i.e.
  // incremental marking is currently active.
  TNode<BoolT> IsMarking() {
    TNode<ExternalReference> is_marking_addr = ExternalConstant(
        ExternalReference::heap_is_marking_flag_address(this->isolate()));
    return Word32NotEqual(Load<Uint8T>(is_marking_addr), Int32Constant(0));
  }

  // Tests {mask} against the flags word of the memory chunk (page)
  // containing {object}.
  TNode<BoolT> IsPageFlagSet(TNode<IntPtrT> object, int mask) {
    TNode<IntPtrT> page = PageFromAddress(object);
    TNode<IntPtrT> flags = UncheckedCast<IntPtrT>(
        Load(MachineType::Pointer(), page,
             IntPtrConstant(BasicMemoryChunk::kFlagsOffset)));
    return WordNotEqual(WordAnd(flags, IntPtrConstant(mask)),
                        IntPtrConstant(0));
  }

  // True iff {object} is unmarked ("white") in the page's marking bitmap.
  TNode<BoolT> IsWhite(TNode<IntPtrT> object) {
    // This check relies on white being the all-zero bit pattern.
    DCHECK_EQ(strcmp(Marking::kWhiteBitPattern, "00"), 0);
    TNode<IntPtrT> cell;
    TNode<IntPtrT> mask;
    GetMarkBit(object, &cell, &mask);
    TNode<Int32T> mask32 = TruncateIntPtrToInt32(mask);
    // Non-white has 1 for the first bit, so we only need to check for the first
    // bit.
    return Word32Equal(Word32And(Load<Int32T>(cell), mask32), Int32Constant(0));
  }

  // Computes the marking-bitmap cell address (*cell) and the bit mask
  // (*mask) corresponding to {object}'s mark bit on its page.
  void GetMarkBit(TNode<IntPtrT> object, TNode<IntPtrT>* cell,
                  TNode<IntPtrT>* mask) {
    TNode<IntPtrT> page = PageFromAddress(object);
    TNode<IntPtrT> bitmap =
        IntPtrAdd(page, IntPtrConstant(MemoryChunk::kMarkingBitmapOffset));

    {
      // Temp variable to calculate cell offset in bitmap.
      TNode<WordT> r0;
      int shift = Bitmap::kBitsPerCellLog2 + kTaggedSizeLog2 -
                  Bitmap::kBytesPerCellLog2;
      r0 = WordShr(object, IntPtrConstant(shift));
      r0 = WordAnd(r0, IntPtrConstant((kPageAlignmentMask >> shift) &
                                      ~(Bitmap::kBytesPerCell - 1)));
      *cell = IntPtrAdd(bitmap, Signed(r0));
    }
    {
      // Temp variable to calculate bit offset in cell.
      TNode<WordT> r1;
      r1 = WordShr(object, IntPtrConstant(kTaggedSizeLog2));
      r1 = WordAnd(r1, IntPtrConstant((1 << Bitmap::kBitsPerCellLog2) - 1));
      // It seems that LSB(e.g. cl) is automatically used, so no manual masking
      // is needed. Uncomment the following line otherwise.
      // WordAnd(r1, IntPtrConstant((1 << kBitsPerByte) - 1)));
      *mask = WordShl(IntPtrConstant(1), r1);
    }
  }

  // Records {slot} (which lives on {object}'s page) in the page's
  // old-to-new SlotSet. The fast path sets the bit directly; if the slot
  // set or bucket is not yet allocated, falls back to the C function
  // insert_remembered_set_function.
  void InsertIntoRememberedSet(TNode<IntPtrT> object, TNode<IntPtrT> slot,
                               SaveFPRegsMode fp_mode) {
    Label slow_path(this), next(this);
    TNode<IntPtrT> page = PageFromAddress(object);

    // Load address of SlotSet
    TNode<IntPtrT> slot_set = LoadSlotSet(page, &slow_path);
    TNode<IntPtrT> slot_offset = IntPtrSub(slot, page);

    // Load bucket
    TNode<IntPtrT> bucket = LoadBucket(slot_set, slot_offset, &slow_path);

    // Update cell
    SetBitInCell(bucket, slot_offset);
    Goto(&next);

    BIND(&slow_path);
    {
      TNode<ExternalReference> function =
          ExternalConstant(ExternalReference::insert_remembered_set_function());
      CallCFunctionWithCallerSavedRegisters(
          function, MachineTypeOf<Int32T>::value, fp_mode,
          std::make_pair(MachineTypeOf<IntPtrT>::value, page),
          std::make_pair(MachineTypeOf<IntPtrT>::value, slot));
      Goto(&next);
    }

    BIND(&next);
  }

  // Loads the old-to-new SlotSet pointer from {page}; jumps to {slow_path}
  // if the set has not been allocated (null).
  TNode<IntPtrT> LoadSlotSet(TNode<IntPtrT> page, Label* slow_path) {
    TNode<IntPtrT> slot_set = UncheckedCast<IntPtrT>(
        Load(MachineType::Pointer(), page,
             IntPtrConstant(MemoryChunk::kOldToNewSlotSetOffset)));
    GotoIf(WordEqual(slot_set, IntPtrConstant(0)), slow_path);
    return slot_set;
  }

  // Loads the bucket pointer for {slot_offset} within {slot_set}; jumps to
  // {slow_path} if the bucket has not been allocated (null).
  TNode<IntPtrT> LoadBucket(TNode<IntPtrT> slot_set, TNode<WordT> slot_offset,
                            Label* slow_path) {
    TNode<WordT> bucket_index =
        WordShr(slot_offset, SlotSet::kBitsPerBucketLog2 + kTaggedSizeLog2);
    TNode<IntPtrT> bucket = UncheckedCast<IntPtrT>(
        Load(MachineType::Pointer(), slot_set,
             WordShl(bucket_index, kSystemPointerSizeLog2)));
    GotoIf(WordEqual(bucket, IntPtrConstant(0)), slow_path);
    return bucket;
  }

  // Sets the bit for {slot_offset} in the appropriate 32-bit cell of
  // {bucket} with a plain (non-atomic, no-write-barrier) read-modify-write.
  void SetBitInCell(TNode<IntPtrT> bucket, TNode<WordT> slot_offset) {
    // Load cell value
    TNode<WordT> cell_offset = WordAnd(
        WordShr(slot_offset, SlotSet::kBitsPerCellLog2 + kTaggedSizeLog2 -
                                 SlotSet::kCellSizeBytesLog2),
        IntPtrConstant((SlotSet::kCellsPerBucket - 1)
                       << SlotSet::kCellSizeBytesLog2));
    TNode<IntPtrT> cell_address =
        UncheckedCast<IntPtrT>(IntPtrAdd(bucket, cell_offset));
    TNode<IntPtrT> old_cell_value =
        ChangeInt32ToIntPtr(Load<Int32T>(cell_address));

    // Calculate new cell value
    TNode<WordT> bit_index = WordAnd(WordShr(slot_offset, kTaggedSizeLog2),
                                     IntPtrConstant(SlotSet::kBitsPerCell - 1));
    TNode<IntPtrT> new_cell_value = UncheckedCast<IntPtrT>(
        WordOr(old_cell_value, WordShl(IntPtrConstant(1), bit_index)));

    // Update cell value
    StoreNoWriteBarrier(MachineRepresentation::kWord32, cell_address,
                        TruncateIntPtrToInt32(new_cell_value));
  }

  // Generational write barrier: records old-to-new pointers in the
  // remembered set and, while marking is active, additionally routes
  // through the incremental write barrier.
  void GenerationalWriteBarrier(SaveFPRegsMode fp_mode) {
    Label incremental_wb(this), test_old_to_young_flags(this),
        store_buffer_exit(this), store_buffer_incremental_wb(this), next(this);

    // When incremental marking is not on, we skip cross generation pointer
    // checking here, because there are checks for
    // `kPointersFromHereAreInterestingMask` and
    // `kPointersToHereAreInterestingMask` in
    // `src/compiler/<arch>/code-generator-<arch>.cc` before calling this
    // stub, which serves as the cross generation checking.
    auto slot =
        UncheckedParameter<IntPtrT>(WriteBarrierDescriptor::kSlotAddress);
    Branch(IsMarking(), &test_old_to_young_flags, &store_buffer_exit);

    BIND(&test_old_to_young_flags);
    {
      // TODO(ishell): do a new-space range check instead.
      TNode<IntPtrT> value = BitcastTaggedToWord(Load<HeapObject>(slot));

      // TODO(albertnetymk): Try to cache the page flag for value and
      // object, instead of calling IsPageFlagSet each time.
      TNode<BoolT> value_is_young =
          IsPageFlagSet(value, MemoryChunk::kIsInYoungGenerationMask);
      GotoIfNot(value_is_young, &incremental_wb);

      TNode<IntPtrT> object = BitcastTaggedToWord(
          UncheckedParameter<Object>(WriteBarrierDescriptor::kObject));
      TNode<BoolT> object_is_young =
          IsPageFlagSet(object, MemoryChunk::kIsInYoungGenerationMask);
      // Old object -> young value: record in the remembered set, then still
      // run the incremental barrier. Young -> young: incremental only.
      Branch(object_is_young, &incremental_wb, &store_buffer_incremental_wb);
    }

    BIND(&store_buffer_exit);
    {
      TNode<IntPtrT> object = BitcastTaggedToWord(
          UncheckedParameter<Object>(WriteBarrierDescriptor::kObject));
      InsertIntoRememberedSet(object, slot, fp_mode);
      Goto(&next);
    }

    BIND(&store_buffer_incremental_wb);
    {
      TNode<IntPtrT> object = BitcastTaggedToWord(
          UncheckedParameter<Object>(WriteBarrierDescriptor::kObject));
      InsertIntoRememberedSet(object, slot, fp_mode);
      Goto(&incremental_wb);
    }

    BIND(&incremental_wb);
    {
      TNode<IntPtrT> value = BitcastTaggedToWord(Load<HeapObject>(slot));
      IncrementalWriteBarrier(slot, value, fp_mode);
      Goto(&next);
    }

    BIND(&next);
  }

  // Incremental-marking-only barrier entry point: loads the stored value
  // from the slot and dispatches to the three-argument overload below.
  void IncrementalWriteBarrier(SaveFPRegsMode fp_mode) {
    auto slot =
        UncheckedParameter<IntPtrT>(WriteBarrierDescriptor::kSlotAddress);
    TNode<IntPtrT> value = BitcastTaggedToWord(Load<HeapObject>(slot));
    IncrementalWriteBarrier(slot, value, fp_mode);
  }

  // Calls write_barrier_marking_from_code when the incremental marker must
  // be informed about the store of {value} into {slot}.
  void IncrementalWriteBarrier(TNode<IntPtrT> slot, TNode<IntPtrT> value,
                               SaveFPRegsMode fp_mode) {
    Label call_incremental_wb(this), next(this);

    // There are two cases we need to call incremental write barrier.
    // 1) value_is_white
    GotoIf(IsWhite(value), &call_incremental_wb);

    // 2) is_compacting && value_in_EC && obj_isnt_skip
    // is_compacting = true when is_marking = true
    GotoIfNot(IsPageFlagSet(value, MemoryChunk::kEvacuationCandidateMask),
              &next);

    {
      TNode<IntPtrT> object = BitcastTaggedToWord(
          UncheckedParameter<Object>(WriteBarrierDescriptor::kObject));
      Branch(
          IsPageFlagSet(object, MemoryChunk::kSkipEvacuationSlotsRecordingMask),
          &next, &call_incremental_wb);
    }
    BIND(&call_incremental_wb);
    {
      TNode<ExternalReference> function = ExternalConstant(
          ExternalReference::write_barrier_marking_from_code_function());
      TNode<IntPtrT> object = BitcastTaggedToWord(
          UncheckedParameter<Object>(WriteBarrierDescriptor::kObject));
      CallCFunctionWithCallerSavedRegisters(
          function, MachineTypeOf<Int32T>::value, fp_mode,
          std::make_pair(MachineTypeOf<IntPtrT>::value, object),
          std::make_pair(MachineTypeOf<IntPtrT>::value, slot));
      Goto(&next);
    }
    BIND(&next);
  }

  // Top-level body of the RecordWrite* builtins. {rs_mode} selects whether
  // the generational (remembered-set) part runs; the stub always returns
  // TrueConstant. With V8_DISABLE_WRITE_BARRIERS the stub is a no-op.
  void GenerateRecordWrite(RememberedSetAction rs_mode,
                           SaveFPRegsMode fp_mode) {
    if (V8_DISABLE_WRITE_BARRIERS_BOOL) {
      Return(TrueConstant());
      return;
    }
    switch (rs_mode) {
      case RememberedSetAction::kEmit:
        GenerationalWriteBarrier(fp_mode);
        break;
      case RememberedSetAction::kOmit:
        IncrementalWriteBarrier(fp_mode);
        break;
    }
    IncrementCounter(isolate()->counters()->write_barriers(), 1);
    Return(TrueConstant());
  }

  // Body of the EphemeronKeyBarrier* builtins: unconditionally calls the
  // C++ ephemeron_key_write_barrier function with (object, slot, isolate).
  void GenerateEphemeronKeyBarrier(SaveFPRegsMode fp_mode) {
    TNode<ExternalReference> function = ExternalConstant(
        ExternalReference::ephemeron_key_write_barrier_function());
    TNode<ExternalReference> isolate_constant =
        ExternalConstant(ExternalReference::isolate_address(isolate()));
    // In this method we limit the allocatable registers so we have to use
    // UncheckedParameter. Parameter does not work because the checked cast
    // needs more registers.
    auto address =
        UncheckedParameter<IntPtrT>(WriteBarrierDescriptor::kSlotAddress);
    TNode<IntPtrT> object = BitcastTaggedToWord(
        UncheckedParameter<Object>(WriteBarrierDescriptor::kObject));

    CallCFunctionWithCallerSavedRegisters(
        function, MachineTypeOf<Int32T>::value, fp_mode,
        std::make_pair(MachineTypeOf<IntPtrT>::value, object),
        std::make_pair(MachineTypeOf<IntPtrT>::value, address),
        std::make_pair(MachineTypeOf<ExternalReference>::value,
                       isolate_constant));

    IncrementCounter(isolate()->counters()->write_barriers(), 1);
    Return(TrueConstant());
  }
};
390
// Stub instantiations for every combination of RememberedSetAction
// (emit/omit) and SaveFPRegsMode (save/ignore FP registers across the
// embedded C calls), plus the two ephemeron-key barrier variants.
TF_BUILTIN(RecordWriteEmitRememberedSetSaveFP, WriteBarrierCodeStubAssembler) {
  GenerateRecordWrite(RememberedSetAction::kEmit, SaveFPRegsMode::kSave);
}

TF_BUILTIN(RecordWriteOmitRememberedSetSaveFP, WriteBarrierCodeStubAssembler) {
  GenerateRecordWrite(RememberedSetAction::kOmit, SaveFPRegsMode::kSave);
}

TF_BUILTIN(RecordWriteEmitRememberedSetIgnoreFP,
           WriteBarrierCodeStubAssembler) {
  GenerateRecordWrite(RememberedSetAction::kEmit, SaveFPRegsMode::kIgnore);
}

TF_BUILTIN(RecordWriteOmitRememberedSetIgnoreFP,
           WriteBarrierCodeStubAssembler) {
  GenerateRecordWrite(RememberedSetAction::kOmit, SaveFPRegsMode::kIgnore);
}

TF_BUILTIN(EphemeronKeyBarrierSaveFP, WriteBarrierCodeStubAssembler) {
  GenerateEphemeronKeyBarrier(SaveFPRegsMode::kSave);
}

TF_BUILTIN(EphemeronKeyBarrierIgnoreFP, WriteBarrierCodeStubAssembler) {
  GenerateEphemeronKeyBarrier(SaveFPRegsMode::kIgnore);
}
416
417 #ifdef V8_IS_TSAN
// Assembler for the TSANRelaxedStore* builtins (TSAN builds only): forwards
// an (address, value) pair to the C function that reports a relaxed store of
// the given width to ThreadSanitizer.
class TSANRelaxedStoreCodeStubAssembler : public CodeStubAssembler {
 public:
  explicit TSANRelaxedStoreCodeStubAssembler(
      compiler::CodeAssemblerState* state)
      : CodeStubAssembler(state) {}

  // Selects the relaxed-store reporting function for {size} bytes
  // (8/16/32/64 bits); CHECK-fails on any other size.
  TNode<ExternalReference> GetExternalReference(int size) {
    if (size == kInt8Size) {
      return ExternalConstant(
          ExternalReference::tsan_relaxed_store_function_8_bits());
    } else if (size == kInt16Size) {
      return ExternalConstant(
          ExternalReference::tsan_relaxed_store_function_16_bits());
    } else if (size == kInt32Size) {
      return ExternalConstant(
          ExternalReference::tsan_relaxed_store_function_32_bits());
    } else {
      CHECK_EQ(size, kInt64Size);
      return ExternalConstant(
          ExternalReference::tsan_relaxed_store_function_64_bits());
    }
  }

  // Builtin body: calls the reporting function with (address, value) and
  // returns undefined.
  void GenerateTSANRelaxedStore(SaveFPRegsMode fp_mode, int size) {
    TNode<ExternalReference> function = GetExternalReference(size);
    auto address = UncheckedParameter<IntPtrT>(TSANStoreDescriptor::kAddress);
    TNode<IntPtrT> value = BitcastTaggedToWord(
        UncheckedParameter<Object>(TSANStoreDescriptor::kValue));
    CallCFunctionWithCallerSavedRegisters(
        function, MachineType::Int32(), fp_mode,
        std::make_pair(MachineType::IntPtr(), address),
        std::make_pair(MachineType::IntPtr(), value));
    Return(UndefinedConstant());
  }
};
453
// Relaxed-store stub instantiations: one per store width (8/16/32/64 bits)
// and SaveFPRegsMode (ignore/save).
TF_BUILTIN(TSANRelaxedStore8IgnoreFP, TSANRelaxedStoreCodeStubAssembler) {
  GenerateTSANRelaxedStore(SaveFPRegsMode::kIgnore, kInt8Size);
}

TF_BUILTIN(TSANRelaxedStore8SaveFP, TSANRelaxedStoreCodeStubAssembler) {
  GenerateTSANRelaxedStore(SaveFPRegsMode::kSave, kInt8Size);
}

TF_BUILTIN(TSANRelaxedStore16IgnoreFP, TSANRelaxedStoreCodeStubAssembler) {
  GenerateTSANRelaxedStore(SaveFPRegsMode::kIgnore, kInt16Size);
}

TF_BUILTIN(TSANRelaxedStore16SaveFP, TSANRelaxedStoreCodeStubAssembler) {
  GenerateTSANRelaxedStore(SaveFPRegsMode::kSave, kInt16Size);
}

TF_BUILTIN(TSANRelaxedStore32IgnoreFP, TSANRelaxedStoreCodeStubAssembler) {
  GenerateTSANRelaxedStore(SaveFPRegsMode::kIgnore, kInt32Size);
}

TF_BUILTIN(TSANRelaxedStore32SaveFP, TSANRelaxedStoreCodeStubAssembler) {
  GenerateTSANRelaxedStore(SaveFPRegsMode::kSave, kInt32Size);
}

TF_BUILTIN(TSANRelaxedStore64IgnoreFP, TSANRelaxedStoreCodeStubAssembler) {
  GenerateTSANRelaxedStore(SaveFPRegsMode::kIgnore, kInt64Size);
}

TF_BUILTIN(TSANRelaxedStore64SaveFP, TSANRelaxedStoreCodeStubAssembler) {
  GenerateTSANRelaxedStore(SaveFPRegsMode::kSave, kInt64Size);
}
485
// Assembler for the TSANSeqCstStore* builtins: identical in structure to the
// relaxed-store assembler above, but dispatches to the sequentially-
// consistent store reporting functions.
class TSANSeqCstStoreCodeStubAssembler : public CodeStubAssembler {
 public:
  explicit TSANSeqCstStoreCodeStubAssembler(compiler::CodeAssemblerState* state)
      : CodeStubAssembler(state) {}

  // Selects the seq-cst-store reporting function for {size} bytes
  // (8/16/32/64 bits); CHECK-fails on any other size.
  TNode<ExternalReference> GetExternalReference(int size) {
    if (size == kInt8Size) {
      return ExternalConstant(
          ExternalReference::tsan_seq_cst_store_function_8_bits());
    } else if (size == kInt16Size) {
      return ExternalConstant(
          ExternalReference::tsan_seq_cst_store_function_16_bits());
    } else if (size == kInt32Size) {
      return ExternalConstant(
          ExternalReference::tsan_seq_cst_store_function_32_bits());
    } else {
      CHECK_EQ(size, kInt64Size);
      return ExternalConstant(
          ExternalReference::tsan_seq_cst_store_function_64_bits());
    }
  }

  // Builtin body: calls the reporting function with (address, value) and
  // returns undefined.
  void GenerateTSANSeqCstStore(SaveFPRegsMode fp_mode, int size) {
    TNode<ExternalReference> function = GetExternalReference(size);
    auto address = UncheckedParameter<IntPtrT>(TSANStoreDescriptor::kAddress);
    TNode<IntPtrT> value = BitcastTaggedToWord(
        UncheckedParameter<Object>(TSANStoreDescriptor::kValue));
    CallCFunctionWithCallerSavedRegisters(
        function, MachineType::Int32(), fp_mode,
        std::make_pair(MachineType::IntPtr(), address),
        std::make_pair(MachineType::IntPtr(), value));
    Return(UndefinedConstant());
  }
};
520
// Seq-cst-store stub instantiations: one per store width (8/16/32/64 bits)
// and SaveFPRegsMode (ignore/save).
TF_BUILTIN(TSANSeqCstStore8IgnoreFP, TSANSeqCstStoreCodeStubAssembler) {
  GenerateTSANSeqCstStore(SaveFPRegsMode::kIgnore, kInt8Size);
}

TF_BUILTIN(TSANSeqCstStore8SaveFP, TSANSeqCstStoreCodeStubAssembler) {
  GenerateTSANSeqCstStore(SaveFPRegsMode::kSave, kInt8Size);
}

TF_BUILTIN(TSANSeqCstStore16IgnoreFP, TSANSeqCstStoreCodeStubAssembler) {
  GenerateTSANSeqCstStore(SaveFPRegsMode::kIgnore, kInt16Size);
}

TF_BUILTIN(TSANSeqCstStore16SaveFP, TSANSeqCstStoreCodeStubAssembler) {
  GenerateTSANSeqCstStore(SaveFPRegsMode::kSave, kInt16Size);
}

TF_BUILTIN(TSANSeqCstStore32IgnoreFP, TSANSeqCstStoreCodeStubAssembler) {
  GenerateTSANSeqCstStore(SaveFPRegsMode::kIgnore, kInt32Size);
}

TF_BUILTIN(TSANSeqCstStore32SaveFP, TSANSeqCstStoreCodeStubAssembler) {
  GenerateTSANSeqCstStore(SaveFPRegsMode::kSave, kInt32Size);
}

TF_BUILTIN(TSANSeqCstStore64IgnoreFP, TSANSeqCstStoreCodeStubAssembler) {
  GenerateTSANSeqCstStore(SaveFPRegsMode::kIgnore, kInt64Size);
}

TF_BUILTIN(TSANSeqCstStore64SaveFP, TSANSeqCstStoreCodeStubAssembler) {
  GenerateTSANSeqCstStore(SaveFPRegsMode::kSave, kInt64Size);
}
552
// Assembler for the TSANRelaxedLoad* builtins: reports a relaxed load at a
// given address to ThreadSanitizer. Only 32- and 64-bit widths exist here
// (unlike the store variants, which also cover 8 and 16 bits).
class TSANRelaxedLoadCodeStubAssembler : public CodeStubAssembler {
 public:
  explicit TSANRelaxedLoadCodeStubAssembler(compiler::CodeAssemblerState* state)
      : CodeStubAssembler(state) {}

  // Selects the relaxed-load reporting function for {size} bytes (32 or 64
  // bits); CHECK-fails on any other size.
  TNode<ExternalReference> GetExternalReference(int size) {
    if (size == kInt32Size) {
      return ExternalConstant(
          ExternalReference::tsan_relaxed_load_function_32_bits());
    } else {
      CHECK_EQ(size, kInt64Size);
      return ExternalConstant(
          ExternalReference::tsan_relaxed_load_function_64_bits());
    }
  }

  // Builtin body: calls the reporting function with the address only (no
  // value is involved for loads) and returns undefined.
  void GenerateTSANRelaxedLoad(SaveFPRegsMode fp_mode, int size) {
    TNode<ExternalReference> function = GetExternalReference(size);
    auto address = UncheckedParameter<IntPtrT>(TSANLoadDescriptor::kAddress);
    CallCFunctionWithCallerSavedRegisters(
        function, MachineType::Int32(), fp_mode,
        std::make_pair(MachineType::IntPtr(), address));
    Return(UndefinedConstant());
  }
};
578
// Relaxed-load stub instantiations: one per load width (32/64 bits) and
// SaveFPRegsMode (ignore/save).
TF_BUILTIN(TSANRelaxedLoad32IgnoreFP, TSANRelaxedLoadCodeStubAssembler) {
  GenerateTSANRelaxedLoad(SaveFPRegsMode::kIgnore, kInt32Size);
}

TF_BUILTIN(TSANRelaxedLoad32SaveFP, TSANRelaxedLoadCodeStubAssembler) {
  GenerateTSANRelaxedLoad(SaveFPRegsMode::kSave, kInt32Size);
}

TF_BUILTIN(TSANRelaxedLoad64IgnoreFP, TSANRelaxedLoadCodeStubAssembler) {
  GenerateTSANRelaxedLoad(SaveFPRegsMode::kIgnore, kInt64Size);
}

TF_BUILTIN(TSANRelaxedLoad64SaveFP, TSANRelaxedLoadCodeStubAssembler) {
  GenerateTSANRelaxedLoad(SaveFPRegsMode::kSave, kInt64Size);
}
594 #endif // V8_IS_TSAN
595
// Assembler with the shared dictionary-mode deletion logic used by the
// DeleteProperty builtin. Supports both NameDictionary and
// SwissNameDictionary property backing stores via overloads.
class DeletePropertyBaseAssembler : public AccessorAssembler {
 public:
  explicit DeletePropertyBaseAssembler(compiler::CodeAssemblerState* state)
      : AccessorAssembler(state) {}

  // NameDictionary variant: clears the entry (key, value, details), updates
  // the element/deleted-element counts, and shrinks the dictionary through
  // the runtime when it becomes sparse enough.
  void DictionarySpecificDelete(TNode<JSReceiver> receiver,
                                TNode<NameDictionary> properties,
                                TNode<IntPtrT> key_index,
                                TNode<Context> context) {
    // Overwrite the entry itself (see NameDictionary::SetEntry).
    TNode<Oddball> filler = TheHoleConstant();
    // The hole is immortal/immovable, so skipping the write barrier is safe.
    DCHECK(RootsTable::IsImmortalImmovable(RootIndex::kTheHoleValue));
    StoreFixedArrayElement(properties, key_index, filler, SKIP_WRITE_BARRIER);
    StoreValueByKeyIndex<NameDictionary>(properties, key_index, filler,
                                         SKIP_WRITE_BARRIER);
    StoreDetailsByKeyIndex<NameDictionary>(properties, key_index,
                                           SmiConstant(0));

    // Update bookkeeping information (see NameDictionary::ElementRemoved).
    TNode<Smi> nof = GetNumberOfElements<NameDictionary>(properties);
    TNode<Smi> new_nof = SmiSub(nof, SmiConstant(1));
    SetNumberOfElements<NameDictionary>(properties, new_nof);
    TNode<Smi> num_deleted =
        GetNumberOfDeletedElements<NameDictionary>(properties);
    TNode<Smi> new_deleted = SmiAdd(num_deleted, SmiConstant(1));
    SetNumberOfDeletedElements<NameDictionary>(properties, new_deleted);

    // Shrink the dictionary if necessary (see NameDictionary::Shrink).
    // Shrinking happens only when the new element count is at most a quarter
    // of the capacity and at least 16.
    Label shrinking_done(this);
    TNode<Smi> capacity = GetCapacity<NameDictionary>(properties);
    GotoIf(SmiGreaterThan(new_nof, SmiShr(capacity, 2)), &shrinking_done);
    GotoIf(SmiLessThan(new_nof, SmiConstant(16)), &shrinking_done);

    TNode<NameDictionary> new_properties =
        CAST(CallRuntime(Runtime::kShrinkNameDictionary, context, properties));

    StoreJSReceiverPropertiesOrHash(receiver, new_properties);

    Goto(&shrinking_done);
    BIND(&shrinking_done);
  }

  // SwissNameDictionary variant: the dictionary helper performs the delete
  // and optionally hands back a shrunk replacement table, which is then
  // installed on the receiver.
  void DictionarySpecificDelete(TNode<JSReceiver> receiver,
                                TNode<SwissNameDictionary> properties,
                                TNode<IntPtrT> key_index,
                                TNode<Context> context) {
    Label shrunk(this), done(this);
    TVARIABLE(SwissNameDictionary, shrunk_table);

    SwissNameDictionaryDelete(properties, key_index, &shrunk, &shrunk_table);
    Goto(&done);
    BIND(&shrunk);
    StoreJSReceiverPropertiesOrHash(receiver, shrunk_table.value());
    Goto(&done);

    BIND(&done);
  }

  // Looks up {name} in {properties}; jumps to {notfound} when absent, to
  // {dont_delete} when the entry carries the DontDelete attribute, and
  // otherwise deletes the entry and returns true from the builtin.
  template <typename Dictionary>
  void DeleteDictionaryProperty(TNode<JSReceiver> receiver,
                                TNode<Dictionary> properties, TNode<Name> name,
                                TNode<Context> context, Label* dont_delete,
                                Label* notfound) {
    TVARIABLE(IntPtrT, var_name_index);
    Label dictionary_found(this, &var_name_index);
    NameDictionaryLookup<Dictionary>(properties, name, &dictionary_found,
                                     &var_name_index, notfound);

    BIND(&dictionary_found);
    TNode<IntPtrT> key_index = var_name_index.value();
    TNode<Uint32T> details = LoadDetailsByKeyIndex(properties, key_index);
    GotoIf(IsSetWord32(details, PropertyDetails::kAttributesDontDeleteMask),
           dont_delete);

    DictionarySpecificDelete(receiver, properties, key_index, context);

    Return(TrueConstant());
  }
};
675
// Builtin backing the `delete` operator. Handles dictionary-mode receivers
// inline; defers proxies to ProxyDeleteProperty and everything else (Smi
// receivers, integer indices, custom-elements receivers, fast-mode objects,
// protected names) to Runtime::kDeleteProperty. Returns true/false per the
// language-mode semantics encoded in {language_mode}.
TF_BUILTIN(DeleteProperty, DeletePropertyBaseAssembler) {
  auto receiver = Parameter<Object>(Descriptor::kObject);
  auto key = Parameter<Object>(Descriptor::kKey);
  auto language_mode = Parameter<Smi>(Descriptor::kLanguageMode);
  auto context = Parameter<Context>(Descriptor::kContext);

  TVARIABLE(IntPtrT, var_index);
  TVARIABLE(Name, var_unique);
  Label if_index(this, &var_index), if_unique_name(this), if_notunique(this),
      if_notfound(this), slow(this), if_proxy(this);

  GotoIf(TaggedIsSmi(receiver), &slow);
  TNode<Map> receiver_map = LoadMap(CAST(receiver));
  TNode<Uint16T> instance_type = LoadMapInstanceType(receiver_map);
  GotoIf(InstanceTypeEqual(instance_type, JS_PROXY_TYPE), &if_proxy);
  GotoIf(IsCustomElementsReceiverInstanceType(instance_type), &slow);
  // Classify the key as an integer index, a unique name, or a
  // not-yet-internalized string.
  TryToName(key, &if_index, &var_index, &if_unique_name, &var_unique, &slow,
            &if_notunique);

  BIND(&if_index);
  {
    Comment("integer index");
    Goto(&slow);  // TODO(jkummerow): Implement more smarts here.
  }

  BIND(&if_unique_name);
  {
    Comment("key is unique name");
    CheckForAssociatedProtector(var_unique.value(), &slow);

    Label dictionary(this), dont_delete(this);
    GotoIf(IsDictionaryMap(receiver_map), &dictionary);

    // Fast properties need to clear recorded slots and mark the deleted
    // property as mutable, which can only be done in C++.
    Goto(&slow);

    BIND(&dictionary);
    {
      InvalidateValidityCellIfPrototype(receiver_map);

      TNode<PropertyDictionary> properties =
          CAST(LoadSlowProperties(CAST(receiver)));
      // On success this returns true directly from the builtin.
      DeleteDictionaryProperty(CAST(receiver), properties, var_unique.value(),
                               context, &dont_delete, &if_notfound);
    }

    BIND(&dont_delete);
    {
      // Non-configurable property: strict mode must throw (handled by the
      // runtime via the slow path); sloppy mode returns false.
      STATIC_ASSERT(LanguageModeSize == 2);
      GotoIf(SmiNotEqual(language_mode, SmiConstant(LanguageMode::kSloppy)),
             &slow);
      Return(FalseConstant());
    }
  }

  BIND(&if_notunique);
  {
    // If the string was not found in the string table, then no object can
    // have a property with that name.
    TryInternalizeString(CAST(key), &if_index, &var_index, &if_unique_name,
                         &var_unique, &if_notfound, &slow);
  }

  BIND(&if_notfound);
  // Deleting an absent property succeeds.
  Return(TrueConstant());

  BIND(&if_proxy);
  {
    TNode<Name> name = CAST(CallBuiltin(Builtin::kToName, context, key));
    // Private symbols on proxies are handled by the runtime.
    GotoIf(IsPrivateSymbol(name), &slow);
    TailCallBuiltin(Builtin::kProxyDeleteProperty, context, receiver, name,
                    language_mode);
  }

  BIND(&slow);
  {
    TailCallRuntime(Runtime::kDeleteProperty, context, receiver, key,
                    language_mode);
  }
}
757
758 namespace {
759
// Assembler shared by the CopyDataProperties and SetDataProperties builtins.
// Both copy the enumerable own properties of {source} onto {target}; they
// differ only in whether stores go through generic [[Set]] semantics
// (use_set == true) or define the properties directly via
// SetPropertyInLiteral (use_set == false).
class SetOrCopyDataPropertiesAssembler : public CodeStubAssembler {
 public:
  explicit SetOrCopyDataPropertiesAssembler(compiler::CodeAssemblerState* state)
      : CodeStubAssembler(state) {}

 protected:
  // Copies the enumerable own properties of {source} onto {target}.
  // Jumps to {if_runtime} whenever the fast path does not apply ({source} has
  // elements, {source} is a non-JSObject JSReceiver, {source} is a non-empty
  // string, or {target}'s map is deprecated). Returns undefined on success.
  TNode<Object> SetOrCopyDataProperties(TNode<Context> context,
                                        TNode<JSReceiver> target,
                                        TNode<Object> source, Label* if_runtime,
                                        bool use_set = true) {
    Label if_done(this), if_noelements(this),
        if_sourcenotjsobject(this, Label::kDeferred);

    // JSPrimitiveWrapper wrappers for numbers don't have any enumerable own
    // properties, so we can immediately skip the whole operation if {source} is
    // a Smi.
    GotoIf(TaggedIsSmi(source), &if_done);

    // Otherwise check if {source} is a proper JSObject, and if not, defer
    // to testing for non-empty strings below.
    TNode<Map> source_map = LoadMap(CAST(source));
    TNode<Uint16T> source_instance_type = LoadMapInstanceType(source_map);
    GotoIfNot(IsJSObjectInstanceType(source_instance_type),
              &if_sourcenotjsobject);

    // The fast path below only walks named properties; bail out to the
    // runtime if {source} has any (indexed) elements.
    TNode<FixedArrayBase> source_elements = LoadElements(CAST(source));
    GotoIf(IsEmptyFixedArray(source_elements), &if_noelements);
    Branch(IsEmptySlowElementDictionary(source_elements), &if_noelements,
           if_runtime);

    BIND(&if_noelements);
    {
      // If the target is deprecated, the object will be updated on first store.
      // If the source for that store equals the target, this will invalidate
      // the cached representation of the source. Handle this case in runtime.
      TNode<Map> target_map = LoadMap(target);
      GotoIf(IsDeprecatedMap(target_map), if_runtime);

      if (use_set) {
        // [[Set]] semantics: each property is stored through the generic
        // keyed-store path with strict language mode.
        TNode<BoolT> target_is_simple_receiver = IsSimpleObjectMap(target_map);
        ForEachEnumerableOwnProperty(
            context, source_map, CAST(source), kEnumerationOrder,
            [=](TNode<Name> key, TNode<Object> value) {
              KeyedStoreGenericGenerator::SetProperty(
                  state(), context, target, target_is_simple_receiver, key,
                  value, LanguageMode::kStrict);
            },
            if_runtime);
      } else {
        // Literal-store semantics: define the data property directly on
        // {target} via the SetPropertyInLiteral builtin.
        ForEachEnumerableOwnProperty(
            context, source_map, CAST(source), kEnumerationOrder,
            [=](TNode<Name> key, TNode<Object> value) {
              CallBuiltin(Builtin::kSetPropertyInLiteral, context, target, key,
                          value);
            },
            if_runtime);
      }
      Goto(&if_done);
    }

    BIND(&if_sourcenotjsobject);
    {
      // Handle other JSReceivers in the runtime.
      GotoIf(IsJSReceiverInstanceType(source_instance_type), if_runtime);

      // Non-empty strings are the only non-JSReceivers that need to be
      // handled explicitly by Object.assign() and CopyDataProperties.
      GotoIfNot(IsStringInstanceType(source_instance_type), &if_done);
      TNode<IntPtrT> source_length = LoadStringLengthAsWord(CAST(source));
      Branch(IntPtrEqual(source_length, IntPtrConstant(0)), &if_done,
             if_runtime);
    }

    BIND(&if_done);
    return UndefinedConstant();
  }
};
837
838 } // namespace
839
840 // ES #sec-copydataproperties
TF_BUILTIN(CopyDataProperties, SetOrCopyDataPropertiesAssembler) {
  auto target = Parameter<JSObject>(Descriptor::kTarget);
  auto source = Parameter<Object>(Descriptor::kSource);
  auto context = Parameter<Context>(Descriptor::kContext);

  // Callers never pass the same object as both target and source.
  CSA_DCHECK(this, TaggedNotEqual(target, source));

  // use_set == false: properties are defined directly on {target} instead of
  // going through [[Set]].
  Label if_runtime(this, Label::kDeferred);
  Return(SetOrCopyDataProperties(context, target, source, &if_runtime, false));

  // Fast path failed; let the runtime perform the full operation.
  BIND(&if_runtime);
  TailCallRuntime(Runtime::kCopyDataProperties, context, target, source);
}
854
TF_BUILTIN(SetDataProperties, SetOrCopyDataPropertiesAssembler) {
  auto target = Parameter<JSReceiver>(Descriptor::kTarget);
  auto source = Parameter<Object>(Descriptor::kSource);
  auto context = Parameter<Context>(Descriptor::kContext);

  Label if_runtime(this, Label::kDeferred);
  // Bail out straight to the runtime when slow paths are forced.
  GotoIfForceSlowPath(&if_runtime);
  // use_set == true: properties are stored with [[Set]] semantics
  // (strict-mode generic keyed store).
  Return(SetOrCopyDataProperties(context, target, source, &if_runtime, true));

  BIND(&if_runtime);
  TailCallRuntime(Runtime::kSetDataProperties, context, target, source);
}
867
TF_BUILTIN(ForInEnumerate, CodeStubAssembler) {
  auto receiver = Parameter<JSReceiver>(Descriptor::kReceiver);
  auto context = Parameter<Context>(Descriptor::kContext);

  Label if_empty(this), if_runtime(this, Label::kDeferred);
  // Fast case: the receiver has a usable enum cache; return its map.
  TNode<Map> receiver_map = CheckEnumCache(receiver, &if_empty, &if_runtime);
  Return(receiver_map);

  // No enumerable properties: return the canonical empty FixedArray.
  BIND(&if_empty);
  Return(EmptyFixedArrayConstant());

  // Slow case: let the runtime compute the enumerable keys.
  BIND(&if_runtime);
  TailCallRuntime(Runtime::kForInEnumerate, context, receiver);
}
882
TF_BUILTIN(ForInPrepare, CodeStubAssembler) {
  // The {enumerator} is either a Map or a FixedArray.
  auto enumerator = Parameter<HeapObject>(Descriptor::kEnumerator);
  auto index = Parameter<TaggedIndex>(Descriptor::kVectorIndex);
  auto feedback_vector = Parameter<FeedbackVector>(Descriptor::kFeedbackVector);
  TNode<UintPtrT> vector_index = Unsigned(TaggedIndexToIntPtr(index));

  // Computes the key cache array and its length, recording for-in feedback
  // in {feedback_vector} at {vector_index}.
  TNode<FixedArray> cache_array;
  TNode<Smi> cache_length;
  ForInPrepare(enumerator, vector_index, feedback_vector, &cache_array,
               &cache_length, UpdateFeedbackMode::kGuaranteedFeedback);
  // This builtin produces two return values.
  Return(cache_array, cache_length);
}
896
TF_BUILTIN(ForInFilter, CodeStubAssembler) {
  auto key = Parameter<String>(Descriptor::kKey);
  auto object = Parameter<HeapObject>(Descriptor::kObject);
  auto context = Parameter<Context>(Descriptor::kContext);

  // Returns {key} if {object} (still) has the property, undefined otherwise.
  Label if_true(this), if_false(this);
  TNode<Oddball> result = HasProperty(context, object, key, kForInHasProperty);
  Branch(IsTrue(result), &if_true, &if_false);

  BIND(&if_true);
  Return(key);

  BIND(&if_false);
  Return(UndefinedConstant());
}
912
// ES #sec-samevalue, returning true/false as a Boolean.
TF_BUILTIN(SameValue, CodeStubAssembler) {
  auto lhs = Parameter<Object>(Descriptor::kLeft);
  auto rhs = Parameter<Object>(Descriptor::kRight);

  Label if_true(this), if_false(this);
  BranchIfSameValue(lhs, rhs, &if_true, &if_false);

  BIND(&if_true);
  Return(TrueConstant());

  BIND(&if_false);
  Return(FalseConstant());
}
926
// Variant of SameValue restricted to number comparison
// (SameValueMode::kNumbersOnly).
TF_BUILTIN(SameValueNumbersOnly, CodeStubAssembler) {
  auto lhs = Parameter<Object>(Descriptor::kLeft);
  auto rhs = Parameter<Object>(Descriptor::kRight);

  Label if_true(this), if_false(this);
  BranchIfSameValue(lhs, rhs, &if_true, &if_false, SameValueMode::kNumbersOnly);

  BIND(&if_true);
  Return(TrueConstant());

  BIND(&if_false);
  Return(FalseConstant());
}
940
// Trampoline used to call C++ builtins through CEntry with a
// BuiltinExitFrame, so that stack frame iterators can attribute the frame to
// {target} when constructing stack traces.
TF_BUILTIN(AdaptorWithBuiltinExitFrame, CodeStubAssembler) {
  auto target = Parameter<JSFunction>(Descriptor::kTarget);
  auto new_target = Parameter<Object>(Descriptor::kNewTarget);
  auto c_function = UncheckedParameter<WordT>(Descriptor::kCFunction);

  // The logic contained here is mirrored for TurboFan inlining in
  // JSTypedLowering::ReduceJSCall{Function,Construct}. Keep these in sync.

  // Make sure we operate in the context of the called function (for example
  // ConstructStubs implemented in C++ will be run in the context of the caller
  // instead of the callee, due to the way that [[Construct]] is defined for
  // ordinary functions).
  TNode<Context> context = LoadJSFunctionContext(target);

  auto actual_argc =
      UncheckedParameter<Int32T>(Descriptor::kActualArgumentsCount);
  CodeStubArguments args(this, actual_argc);

  // Starts out as the actual argument count (including the receiver) and is
  // bumped up to the formal parameter count below if necessary.
  TVARIABLE(Int32T, pushed_argc,
            TruncateIntPtrToInt32(args.GetLengthWithReceiver()));

  TNode<SharedFunctionInfo> shared = LoadJSFunctionSharedFunctionInfo(target);

  TNode<Int32T> formal_count = UncheckedCast<Int32T>(
      LoadSharedFunctionInfoFormalParameterCountWithReceiver(shared));

  // The number of arguments pushed is the maximum of actual arguments count
  // and formal parameters count. Except when the formal parameters count is
  // the sentinel.
  Label check_argc(this), update_argc(this), done_argc(this);

  Branch(IsSharedFunctionInfoDontAdaptArguments(shared), &done_argc,
         &check_argc);
  BIND(&check_argc);
  Branch(Int32GreaterThan(formal_count, pushed_argc.value()), &update_argc,
         &done_argc);
  BIND(&update_argc);
  pushed_argc = formal_count;
  Goto(&done_argc);
  BIND(&done_argc);

  // Update arguments count for CEntry to contain the number of arguments
  // including the receiver and the extra arguments.
  TNode<Int32T> argc = Int32Add(
      pushed_argc.value(),
      Int32Constant(BuiltinExitFrameConstants::kNumExtraArgsWithoutReceiver));

  const bool builtin_exit_frame = true;
  TNode<Code> code =
      HeapConstant(CodeFactory::CEntry(isolate(), 1, SaveFPRegsMode::kIgnore,
                                       ArgvMode::kStack, builtin_exit_frame));

  // Unconditionally push argc, target and new target as extra stack arguments.
  // They will be used by stack frame iterators when constructing stack trace.
  TailCallStub(CEntry1ArgvOnStackDescriptor{},  // descriptor
               code, context,       // standard arguments for TailCallStub
               argc, c_function,    // register arguments
               TheHoleConstant(),   // additional stack argument 1 (padding)
               SmiFromInt32(argc),  // additional stack argument 2
               target,              // additional stack argument 3
               new_target);         // additional stack argument 4
}
1003
TF_BUILTIN(AllocateInYoungGeneration, CodeStubAssembler) {
  // Tail-calls into the runtime to allocate {requested_size} bytes in the
  // young generation; fallback to large-object allocation is permitted.
  auto requested_size = UncheckedParameter<IntPtrT>(Descriptor::kRequestedSize);
  CSA_CHECK(this, IsValidPositiveSmi(requested_size));

  // No double alignment, large objects allowed.
  const int raw_flags = AllocateDoubleAlignFlag::encode(false) |
                        AllowLargeObjectAllocationFlag::encode(true);
  TailCallRuntime(Runtime::kAllocateInYoungGeneration, NoContextConstant(),
                  SmiFromIntPtr(requested_size),
                  SmiConstant(Smi::FromInt(raw_flags)));
}
1014
TF_BUILTIN(AllocateRegularInYoungGeneration, CodeStubAssembler) {
  // Young-generation allocation of {requested_size} bytes; unlike
  // AllocateInYoungGeneration, large-object allocation is not allowed.
  auto requested_size = UncheckedParameter<IntPtrT>(Descriptor::kRequestedSize);
  CSA_CHECK(this, IsValidPositiveSmi(requested_size));

  // No double alignment, no large objects.
  const int raw_flags = AllocateDoubleAlignFlag::encode(false) |
                        AllowLargeObjectAllocationFlag::encode(false);
  TailCallRuntime(Runtime::kAllocateInYoungGeneration, NoContextConstant(),
                  SmiFromIntPtr(requested_size),
                  SmiConstant(Smi::FromInt(raw_flags)));
}
1025
TF_BUILTIN(AllocateInOldGeneration, CodeStubAssembler) {
  // Tail-calls into the runtime to allocate {requested_size} bytes in the old
  // generation; fallback to large-object allocation is permitted.
  auto requested_size = UncheckedParameter<IntPtrT>(Descriptor::kRequestedSize);
  CSA_CHECK(this, IsValidPositiveSmi(requested_size));

  // Local named {allocation_flags} to match the young-generation builtins
  // above (was inconsistently called {runtime_flags}).
  TNode<Smi> allocation_flags =
      SmiConstant(Smi::FromInt(AllocateDoubleAlignFlag::encode(false) |
                               AllowLargeObjectAllocationFlag::encode(true)));
  TailCallRuntime(Runtime::kAllocateInOldGeneration, NoContextConstant(),
                  SmiFromIntPtr(requested_size), allocation_flags);
}
1036
TF_BUILTIN(AllocateRegularInOldGeneration, CodeStubAssembler) {
  // Old-generation allocation of {requested_size} bytes; unlike
  // AllocateInOldGeneration, large-object allocation is not allowed.
  auto requested_size = UncheckedParameter<IntPtrT>(Descriptor::kRequestedSize);
  CSA_CHECK(this, IsValidPositiveSmi(requested_size));

  // Local named {allocation_flags} to match the young-generation builtins
  // above (was inconsistently called {runtime_flags}).
  TNode<Smi> allocation_flags =
      SmiConstant(Smi::FromInt(AllocateDoubleAlignFlag::encode(false) |
                               AllowLargeObjectAllocationFlag::encode(false)));
  TailCallRuntime(Runtime::kAllocateInOldGeneration, NoContextConstant(),
                  SmiFromIntPtr(requested_size), allocation_flags);
}
1047
TF_BUILTIN(Abort, CodeStubAssembler) {
  // Forwards the message id straight through to Runtime::kAbort.
  TailCallRuntime(Runtime::kAbort, NoContextConstant(),
                  Parameter<Smi>(Descriptor::kMessageOrMessageId));
}
1052
TF_BUILTIN(AbortCSADcheck, CodeStubAssembler) {
  // Forwards the failure message string straight to Runtime::kAbortCSADcheck.
  TailCallRuntime(Runtime::kAbortCSADcheck, NoContextConstant(),
                  Parameter<String>(Descriptor::kMessageOrMessageId));
}
1057
Generate_CEntry_Return1_DontSaveFPRegs_ArgvOnStack_NoBuiltinExit(MacroAssembler * masm)1058 void Builtins::Generate_CEntry_Return1_DontSaveFPRegs_ArgvOnStack_NoBuiltinExit(
1059 MacroAssembler* masm) {
1060 Generate_CEntry(masm, 1, SaveFPRegsMode::kIgnore, ArgvMode::kStack, false);
1061 }
1062
Generate_CEntry_Return1_DontSaveFPRegs_ArgvOnStack_BuiltinExit(MacroAssembler * masm)1063 void Builtins::Generate_CEntry_Return1_DontSaveFPRegs_ArgvOnStack_BuiltinExit(
1064 MacroAssembler* masm) {
1065 Generate_CEntry(masm, 1, SaveFPRegsMode::kIgnore, ArgvMode::kStack, true);
1066 }
1067
1068 void Builtins::
Generate_CEntry_Return1_DontSaveFPRegs_ArgvInRegister_NoBuiltinExit(MacroAssembler * masm)1069 Generate_CEntry_Return1_DontSaveFPRegs_ArgvInRegister_NoBuiltinExit(
1070 MacroAssembler* masm) {
1071 Generate_CEntry(masm, 1, SaveFPRegsMode::kIgnore, ArgvMode::kRegister, false);
1072 }
1073
Generate_CEntry_Return1_SaveFPRegs_ArgvOnStack_NoBuiltinExit(MacroAssembler * masm)1074 void Builtins::Generate_CEntry_Return1_SaveFPRegs_ArgvOnStack_NoBuiltinExit(
1075 MacroAssembler* masm) {
1076 Generate_CEntry(masm, 1, SaveFPRegsMode::kSave, ArgvMode::kStack, false);
1077 }
1078
Generate_CEntry_Return1_SaveFPRegs_ArgvOnStack_BuiltinExit(MacroAssembler * masm)1079 void Builtins::Generate_CEntry_Return1_SaveFPRegs_ArgvOnStack_BuiltinExit(
1080 MacroAssembler* masm) {
1081 Generate_CEntry(masm, 1, SaveFPRegsMode::kSave, ArgvMode::kStack, true);
1082 }
1083
Generate_CEntry_Return2_DontSaveFPRegs_ArgvOnStack_NoBuiltinExit(MacroAssembler * masm)1084 void Builtins::Generate_CEntry_Return2_DontSaveFPRegs_ArgvOnStack_NoBuiltinExit(
1085 MacroAssembler* masm) {
1086 Generate_CEntry(masm, 2, SaveFPRegsMode::kIgnore, ArgvMode::kStack, false);
1087 }
1088
Generate_CEntry_Return2_DontSaveFPRegs_ArgvOnStack_BuiltinExit(MacroAssembler * masm)1089 void Builtins::Generate_CEntry_Return2_DontSaveFPRegs_ArgvOnStack_BuiltinExit(
1090 MacroAssembler* masm) {
1091 Generate_CEntry(masm, 2, SaveFPRegsMode::kIgnore, ArgvMode::kStack, true);
1092 }
1093
1094 void Builtins::
Generate_CEntry_Return2_DontSaveFPRegs_ArgvInRegister_NoBuiltinExit(MacroAssembler * masm)1095 Generate_CEntry_Return2_DontSaveFPRegs_ArgvInRegister_NoBuiltinExit(
1096 MacroAssembler* masm) {
1097 Generate_CEntry(masm, 2, SaveFPRegsMode::kIgnore, ArgvMode::kRegister, false);
1098 }
1099
Generate_CEntry_Return2_SaveFPRegs_ArgvOnStack_NoBuiltinExit(MacroAssembler * masm)1100 void Builtins::Generate_CEntry_Return2_SaveFPRegs_ArgvOnStack_NoBuiltinExit(
1101 MacroAssembler* masm) {
1102 Generate_CEntry(masm, 2, SaveFPRegsMode::kSave, ArgvMode::kStack, false);
1103 }
1104
Generate_CEntry_Return2_SaveFPRegs_ArgvOnStack_BuiltinExit(MacroAssembler * masm)1105 void Builtins::Generate_CEntry_Return2_SaveFPRegs_ArgvOnStack_BuiltinExit(
1106 MacroAssembler* masm) {
1107 Generate_CEntry(masm, 2, SaveFPRegsMode::kSave, ArgvMode::kStack, true);
1108 }
1109
1110 #if !defined(V8_TARGET_ARCH_ARM) && !defined(V8_TARGET_ARCH_MIPS)
void Builtins::Generate_MemCopyUint8Uint8(MacroAssembler* masm) {
  // Stub for architectures without a specialized MemCopy builtin: calls the
  // Illegal builtin, i.e. this must never actually be invoked.
  masm->Call(BUILTIN_CODE(masm->isolate(), Illegal), RelocInfo::CODE_TARGET);
}
1114 #endif // !defined(V8_TARGET_ARCH_ARM) && !defined(V8_TARGET_ARCH_MIPS)
1115
1116 #ifndef V8_TARGET_ARCH_IA32
void Builtins::Generate_MemMove(MacroAssembler* masm) {
  // Stub for architectures without a specialized MemMove builtin: calls the
  // Illegal builtin, i.e. this must never actually be invoked.
  masm->Call(BUILTIN_CODE(masm->isolate(), Illegal), RelocInfo::CODE_TARGET);
}
1120 #endif // V8_TARGET_ARCH_IA32
1121
1122 // TODO(v8:11421): Remove #if once baseline compiler is ported to other
1123 // architectures.
1124 #if ENABLE_SPARKPLUG
void Builtins::Generate_BaselineLeaveFrame(MacroAssembler* masm) {
  // Delegates to the architecture-specific baseline frame epilogue.
  EmitReturnBaseline(masm);
}
1128 #else
1129 // Stub out implementations of arch-specific baseline builtins.
void Builtins::Generate_BaselineOutOfLinePrologue(MacroAssembler* masm) {
  // Baseline (Sparkplug) is not available on this architecture; this builtin
  // must never be reached.
  masm->Trap();
}
void Builtins::Generate_BaselineLeaveFrame(MacroAssembler* masm) {
  // Baseline (Sparkplug) is not available on this architecture; this builtin
  // must never be reached.
  masm->Trap();
}
void Builtins::Generate_BaselineOnStackReplacement(MacroAssembler* masm) {
  // Baseline (Sparkplug) is not available on this architecture; this builtin
  // must never be reached.
  masm->Trap();
}
1139 #endif
1140
1141 // ES6 [[Get]] operation.
TF_BUILTIN(GetProperty, CodeStubAssembler) {
  auto object = Parameter<Object>(Descriptor::kObject);
  auto key = Parameter<Object>(Descriptor::kKey);
  auto context = Parameter<Context>(Descriptor::kContext);
  // TODO(duongn): consider tailcalling to GetPropertyWithReceiver(object,
  // object, key, OnNonExistent::kReturnUndefined).
  Label if_notfound(this), if_proxy(this, Label::kDeferred),
      if_slow(this, Label::kDeferred);

  // Named-property lookup on one holder in the prototype chain; returns the
  // value directly from the builtin when the property is found.
  CodeStubAssembler::LookupPropertyInHolder lookup_property_in_holder =
      [=](TNode<HeapObject> receiver, TNode<HeapObject> holder,
          TNode<Map> holder_map, TNode<Int32T> holder_instance_type,
          TNode<Name> unique_name, Label* next_holder, Label* if_bailout) {
        TVARIABLE(Object, var_value);
        Label if_found(this);
        TryGetOwnProperty(context, receiver, CAST(holder), holder_map,
                          holder_instance_type, unique_name, &if_found,
                          &var_value, next_holder, if_bailout);
        BIND(&if_found);
        Return(var_value.value());
      };

  // Indexed-element lookup is not handled on this fast path; always bail
  // out.
  CodeStubAssembler::LookupElementInHolder lookup_element_in_holder =
      [=](TNode<HeapObject> receiver, TNode<HeapObject> holder,
          TNode<Map> holder_map, TNode<Int32T> holder_instance_type,
          TNode<IntPtrT> index, Label* next_holder, Label* if_bailout) {
        // Not supported yet.
        Use(next_holder);
        Goto(if_bailout);
      };

  // Walk the prototype chain, using {object} as both receiver and first
  // holder.
  TryPrototypeChainLookup(object, object, key, lookup_property_in_holder,
                          lookup_element_in_holder, &if_notfound, &if_slow,
                          &if_proxy);

  BIND(&if_notfound);
  Return(UndefinedConstant());

  BIND(&if_slow);
  TailCallRuntime(Runtime::kGetProperty, context, object, key);

  BIND(&if_proxy);
  {
    // Convert the {key} to a Name first.
    TNode<Object> name = CallBuiltin(Builtin::kToName, context, key);

    // The {object} is a JSProxy instance, look up the {name} on it, passing
    // {object} both as receiver and holder. If {name} is absent we can safely
    // return undefined from here.
    TailCallBuiltin(Builtin::kProxyGetProperty, context, object, name, object,
                    SmiConstant(OnNonExistent::kReturnUndefined));
  }
}
1195
1196 // ES6 [[Get]] operation with Receiver.
TF_BUILTIN(GetPropertyWithReceiver, CodeStubAssembler) {
  auto object = Parameter<Object>(Descriptor::kObject);
  auto key = Parameter<Object>(Descriptor::kKey);
  auto context = Parameter<Context>(Descriptor::kContext);
  // {receiver} is the value that property getters are invoked with; it may
  // differ from {object}, the start of the lookup.
  auto receiver = Parameter<Object>(Descriptor::kReceiver);
  // {on_non_existent} selects return-undefined vs throw-ReferenceError
  // behavior when the property is absent.
  auto on_non_existent = Parameter<Object>(Descriptor::kOnNonExistent);
  Label if_notfound(this), if_proxy(this, Label::kDeferred),
      if_slow(this, Label::kDeferred);

  // Named-property lookup on one holder in the prototype chain; returns the
  // value directly from the builtin when the property is found.
  CodeStubAssembler::LookupPropertyInHolder lookup_property_in_holder =
      [=](TNode<HeapObject> receiver, TNode<HeapObject> holder,
          TNode<Map> holder_map, TNode<Int32T> holder_instance_type,
          TNode<Name> unique_name, Label* next_holder, Label* if_bailout) {
        TVARIABLE(Object, var_value);
        Label if_found(this);
        TryGetOwnProperty(context, receiver, CAST(holder), holder_map,
                          holder_instance_type, unique_name, &if_found,
                          &var_value, next_holder, if_bailout);
        BIND(&if_found);
        Return(var_value.value());
      };

  // Indexed-element lookup is not handled on this fast path; always bail
  // out.
  CodeStubAssembler::LookupElementInHolder lookup_element_in_holder =
      [=](TNode<HeapObject> receiver, TNode<HeapObject> holder,
          TNode<Map> holder_map, TNode<Int32T> holder_instance_type,
          TNode<IntPtrT> index, Label* next_holder, Label* if_bailout) {
        // Not supported yet.
        Use(next_holder);
        Goto(if_bailout);
      };

  TryPrototypeChainLookup(receiver, object, key, lookup_property_in_holder,
                          lookup_element_in_holder, &if_notfound, &if_slow,
                          &if_proxy);

  BIND(&if_notfound);
  Label throw_reference_error(this);
  GotoIf(TaggedEqual(on_non_existent,
                     SmiConstant(OnNonExistent::kThrowReferenceError)),
         &throw_reference_error);
  CSA_DCHECK(this, TaggedEqual(on_non_existent,
                               SmiConstant(OnNonExistent::kReturnUndefined)));
  Return(UndefinedConstant());

  BIND(&throw_reference_error);
  Return(CallRuntime(Runtime::kThrowReferenceError, context, key));

  BIND(&if_slow);
  TailCallRuntime(Runtime::kGetPropertyWithReceiver, context, object, key,
                  receiver, on_non_existent);

  BIND(&if_proxy);
  {
    // Convert the {key} to a Name first.
    TNode<Name> name = CAST(CallBuiltin(Builtin::kToName, context, key));

    // Proxy cannot handle private symbol so bailout.
    GotoIf(IsPrivateSymbol(name), &if_slow);

    // The {object} is a JSProxy instance, look up the {name} on it, passing
    // {object} both as receiver and holder. If {name} is absent we can safely
    // return undefined from here.
    TailCallBuiltin(Builtin::kProxyGetProperty, context, object, name, receiver,
                    on_non_existent);
  }
}
1263
1264 // ES6 [[Set]] operation.
TF_BUILTIN(SetProperty, CodeStubAssembler) {
  auto context = Parameter<Context>(Descriptor::kContext);
  auto receiver = Parameter<Object>(Descriptor::kReceiver);
  auto key = Parameter<Object>(Descriptor::kKey);
  auto value = Parameter<Object>(Descriptor::kValue);

  // Generic keyed store with strict language mode.
  KeyedStoreGenericGenerator::SetProperty(state(), context, receiver, key,
                                          value, LanguageMode::kStrict);
}
1274
1275 // ES6 CreateDataProperty(), specialized for the case where objects are still
1276 // being initialized, and have not yet been made accessible to the user. Thus,
1277 // any operation here should be unobservable until after the object has been
1278 // returned.
TF_BUILTIN(SetPropertyInLiteral, CodeStubAssembler) {
  auto context = Parameter<Context>(Descriptor::kContext);
  auto receiver = Parameter<JSObject>(Descriptor::kReceiver);
  auto key = Parameter<Object>(Descriptor::kKey);
  auto value = Parameter<Object>(Descriptor::kValue);

  // Defines the property directly on {receiver} (literal/initialization
  // semantics), bypassing the generic [[Set]] path.
  KeyedStoreGenericGenerator::SetPropertyInLiteral(state(), context, receiver,
                                                   key, value);
}
1288
// Attempts to instantiate {function} as asm.js via the runtime. On success
// the instantiated result is returned to the caller; on failure the call
// falls back to invoking {function} as regular JavaScript.
TF_BUILTIN(InstantiateAsmJs, CodeStubAssembler) {
  Label tailcall_to_function(this);
  auto context = Parameter<Context>(Descriptor::kContext);
  auto new_target = Parameter<Object>(Descriptor::kNewTarget);
  auto arg_count =
      UncheckedParameter<Int32T>(Descriptor::kActualArgumentsCount);
  auto function = Parameter<JSFunction>(Descriptor::kTarget);

  // Retrieve arguments from caller (stdlib, foreign, heap).
  CodeStubArguments args(this, arg_count);
  TNode<Object> stdlib = args.GetOptionalArgumentValue(0);
  TNode<Object> foreign = args.GetOptionalArgumentValue(1);
  TNode<Object> heap = args.GetOptionalArgumentValue(2);

  // Call runtime, on success just pass the result to the caller and pop all
  // arguments. A smi 0 is returned on failure, an object on success.
  TNode<Object> maybe_result_or_smi_zero = CallRuntime(
      Runtime::kInstantiateAsmJs, context, function, stdlib, foreign, heap);
  GotoIf(TaggedIsSmi(maybe_result_or_smi_zero), &tailcall_to_function);

  TNode<SharedFunctionInfo> shared = LoadJSFunctionSharedFunctionInfo(function);
  TNode<Int32T> parameter_count = UncheckedCast<Int32T>(
      LoadSharedFunctionInfoFormalParameterCountWithReceiver(shared));
  // This builtin intercepts a call to {function}, where the number of arguments
  // pushed is the maximum of actual arguments count and formal parameters
  // count.
  Label argc_lt_param_count(this), argc_ge_param_count(this);
  Branch(IntPtrLessThan(args.GetLengthWithReceiver(),
                        ChangeInt32ToIntPtr(parameter_count)),
         &argc_lt_param_count, &argc_ge_param_count);
  // Pop exactly as many slots as were pushed: formal parameter count when
  // the caller was padded up to it, actual argument count otherwise.
  BIND(&argc_lt_param_count);
  PopAndReturn(parameter_count, maybe_result_or_smi_zero);
  BIND(&argc_ge_param_count);
  args.PopAndReturn(maybe_result_or_smi_zero);

  BIND(&tailcall_to_function);
  // On failure, tail call back to regular JavaScript by re-calling the given
  // function which has been reset to the compile lazy builtin.

  // TODO(v8:11880): call CodeT instead.
  TNode<Code> code = FromCodeT(LoadJSFunctionCode(function));
  TailCallJSCode(code, context, function, new_target, arg_count);
}
1332
1333 } // namespace internal
1334 } // namespace v8
1335