// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/interpreter/bytecode-array-writer.h"

#include "src/api/api-inl.h"
#include "src/heap/off-thread-factory-inl.h"
#include "src/interpreter/bytecode-jump-table.h"
#include "src/interpreter/bytecode-label.h"
#include "src/interpreter/bytecode-node.h"
#include "src/interpreter/bytecode-register.h"
#include "src/interpreter/bytecode-source-info.h"
#include "src/interpreter/constant-array-builder.h"
#include "src/interpreter/handler-table-builder.h"
#include "src/objects/objects-inl.h"

namespace v8 {
namespace internal {
namespace interpreter {

STATIC_CONST_MEMBER_DEFINITION const size_t
    BytecodeArrayWriter::kMaxSizeOfPackedBytecode;

BytecodeArrayWriter::BytecodeArrayWriter(
    Zone* zone, ConstantArrayBuilder* constant_array_builder,
    SourcePositionTableBuilder::RecordingMode source_position_mode)
    : bytecodes_(zone),
      unbound_jumps_(0),
      source_position_table_builder_(source_position_mode),
      constant_array_builder_(constant_array_builder),
      last_bytecode_(Bytecode::kIllegal),
      last_bytecode_offset_(0),
      last_bytecode_had_source_info_(false),
      elide_noneffectful_bytecodes_(FLAG_ignition_elide_noneffectful_bytecodes),
      exit_seen_in_block_(false) {
  bytecodes_.reserve(512);  // Derived via experimentation.
}

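// Assembles the final BytecodeArray on the given isolate's heap: the raw
// bytecode bytes are copied in, the constant pool is materialized, the frame
// size is derived from |register_count|, and the handler table is attached.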
template <typename LocalIsolate>
Handle<BytecodeArray> BytecodeArrayWriter::ToBytecodeArray(
    LocalIsolate* isolate, int register_count, int parameter_count,
    Handle<ByteArray> handler_table) {
  DCHECK_EQ(0, unbound_jumps_);

  int bytecode_size = static_cast<int>(bytecodes()->size());
  int frame_size = register_count * kSystemPointerSize;
  Handle<FixedArray> constant_pool =
      constant_array_builder()->ToFixedArray(isolate);
  Handle<BytecodeArray> bytecode_array = isolate->factory()->NewBytecodeArray(
      bytecode_size, &bytecodes()->front(), frame_size, parameter_count,
      constant_pool);
  bytecode_array->set_handler_table(*handler_table);
  return bytecode_array;
}

template EXPORT_TEMPLATE_DEFINE(V8_EXPORT_PRIVATE)
    Handle<BytecodeArray> BytecodeArrayWriter::ToBytecodeArray(
        Isolate* isolate, int register_count, int parameter_count,
        Handle<ByteArray> handler_table);
template EXPORT_TEMPLATE_DEFINE(V8_EXPORT_PRIVATE)
    Handle<BytecodeArray> BytecodeArrayWriter::ToBytecodeArray(
        OffThreadIsolate* isolate, int register_count, int parameter_count,
        Handle<ByteArray> handler_table);

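// Builds the source position table, or returns the canonical empty byte
// array when the builder was configured to omit positions.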
template <typename LocalIsolate>
Handle<ByteArray> BytecodeArrayWriter::ToSourcePositionTable(
    LocalIsolate* isolate) {
  DCHECK(!source_position_table_builder_.Lazy());
  Handle<ByteArray> source_position_table =
      source_position_table_builder_.Omit()
          ? isolate->factory()->empty_byte_array()
          : source_position_table_builder_.ToSourcePositionTable(isolate);
  return source_position_table;
}

template EXPORT_TEMPLATE_DEFINE(V8_EXPORT_PRIVATE)
    Handle<ByteArray> BytecodeArrayWriter::ToSourcePositionTable(
        Isolate* isolate);
template EXPORT_TEMPLATE_DEFINE(V8_EXPORT_PRIVATE)
    Handle<ByteArray> BytecodeArrayWriter::ToSourcePositionTable(
        OffThreadIsolate* isolate);

#ifdef DEBUG
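// Compares the bytes written so far against |bytecode|. Returns the offset
// of the first mismatching byte (a pure length mismatch reports the first
// extra byte), or -1 if the two bytecode streams match exactly.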
int BytecodeArrayWriter::CheckBytecodeMatches(BytecodeArray bytecode) {
  bool mismatches = false;
  int bytecode_size = static_cast<int>(bytecodes()->size());
  const byte* bytecode_ptr = &bytecodes()->front();
  if (bytecode_size != bytecode.length()) mismatches = true;

  // If there's a mismatch only in the length of the bytecode (very unlikely)
  // then the first mismatch will be the first extra bytecode.
  int first_mismatch = std::min(bytecode_size, bytecode.length());
  for (int i = 0; i < first_mismatch; ++i) {
    if (bytecode_ptr[i] != bytecode.get(i)) {
      mismatches = true;
      first_mismatch = i;
      break;
    }
  }

  if (mismatches) {
    return first_mismatch;
  }
  return -1;
}
#endif

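// Write, WriteJump, WriteJumpLoop and WriteSwitch share the same pattern:
// drop the node if the current basic block has already seen an exit (dead
// code), update the exit/elision bookkeeping, record the node's source
// position, and emit it.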
void BytecodeArrayWriter::Write(BytecodeNode* node) {
  DCHECK(!Bytecodes::IsJump(node->bytecode()));

  if (exit_seen_in_block_) return;  // Don't emit dead code.
  UpdateExitSeenInBlock(node->bytecode());
  MaybeElideLastBytecode(node->bytecode(), node->source_info().is_valid());

  UpdateSourcePositionTable(node);
  EmitBytecode(node);
}

void BytecodeArrayWriter::WriteJump(BytecodeNode* node, BytecodeLabel* label) {
  DCHECK(Bytecodes::IsForwardJump(node->bytecode()));

  if (exit_seen_in_block_) return;  // Don't emit dead code.
  UpdateExitSeenInBlock(node->bytecode());
  MaybeElideLastBytecode(node->bytecode(), node->source_info().is_valid());

  UpdateSourcePositionTable(node);
  EmitJump(node, label);
}

void BytecodeArrayWriter::WriteJumpLoop(BytecodeNode* node,
                                        BytecodeLoopHeader* loop_header) {
  DCHECK_EQ(node->bytecode(), Bytecode::kJumpLoop);

  if (exit_seen_in_block_) return;  // Don't emit dead code.
  UpdateExitSeenInBlock(node->bytecode());
  MaybeElideLastBytecode(node->bytecode(), node->source_info().is_valid());

  UpdateSourcePositionTable(node);
  EmitJumpLoop(node, loop_header);
}

void BytecodeArrayWriter::WriteSwitch(BytecodeNode* node,
                                      BytecodeJumpTable* jump_table) {
  DCHECK(Bytecodes::IsSwitch(node->bytecode()));

  if (exit_seen_in_block_) return;  // Don't emit dead code.
  UpdateExitSeenInBlock(node->bytecode());
  MaybeElideLastBytecode(node->bytecode(), node->source_info().is_valid());

  UpdateSourcePositionTable(node);
  EmitSwitch(node, jump_table);
}

void BytecodeArrayWriter::BindLabel(BytecodeLabel* label) {
  DCHECK(label->has_referrer_jump());
  size_t current_offset = bytecodes()->size();
  // Update the jump instruction's location.
  PatchJump(current_offset, label->jump_offset());
  label->bind();
  StartBasicBlock();
}

void BytecodeArrayWriter::BindLoopHeader(BytecodeLoopHeader* loop_header) {
  size_t current_offset = bytecodes()->size();
  loop_header->bind_to(current_offset);
  StartBasicBlock();
}

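// Each bound case target is stored in the constant pool as a Smi holding
// the forward delta from the switch bytecode's offset (recorded by
// EmitSwitch below) to the case's first bytecode.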
void BytecodeArrayWriter::BindJumpTableEntry(BytecodeJumpTable* jump_table,
                                             int case_value) {
  DCHECK(!jump_table->is_bound(case_value));

  size_t current_offset = bytecodes()->size();
  size_t relative_jump = current_offset - jump_table->switch_bytecode_offset();

  constant_array_builder()->SetJumpTableSmi(
      jump_table->ConstantPoolEntryFor(case_value),
      Smi::FromInt(static_cast<int>(relative_jump)));
  jump_table->mark_bound(case_value);

  StartBasicBlock();
}

void BytecodeArrayWriter::BindHandlerTarget(
    HandlerTableBuilder* handler_table_builder, int handler_id) {
  size_t current_offset = bytecodes()->size();
  StartBasicBlock();
  handler_table_builder->SetHandlerTarget(handler_id, current_offset);
}

void BytecodeArrayWriter::BindTryRegionStart(
    HandlerTableBuilder* handler_table_builder, int handler_id) {
  size_t current_offset = bytecodes()->size();
  // Try blocks don't have to be in a separate basic block, but we do have to
  // invalidate the last bytecode so it cannot be elided later, which would
  // change the offset recorded here.
  InvalidateLastBytecode();
  handler_table_builder->SetTryRegionStart(handler_id, current_offset);
}

void BytecodeArrayWriter::BindTryRegionEnd(
    HandlerTableBuilder* handler_table_builder, int handler_id) {
  // Try blocks don't have to be in a separate basic block, but we do have to
  // invalidate the last bytecode so it cannot be elided later, which would
  // change the offset recorded here.
  InvalidateLastBytecode();
  size_t current_offset = bytecodes()->size();
  handler_table_builder->SetTryRegionEnd(handler_id, current_offset);
}

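// Records the function-entry position against the synthetic
// kFunctionEntryBytecodeOffset rather than a real bytecode offset, so that
// tooling (e.g. a stack trace taken at function entry) can attribute it.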
void BytecodeArrayWriter::SetFunctionEntrySourcePosition(int position) {
  bool is_statement = false;
  source_position_table_builder_.AddPosition(
      kFunctionEntryBytecodeOffset, SourcePosition(position), is_statement);
}

void BytecodeArrayWriter::StartBasicBlock() {
  InvalidateLastBytecode();
  exit_seen_in_block_ = false;
}

void BytecodeArrayWriter::UpdateSourcePositionTable(
    const BytecodeNode* const node) {
  int bytecode_offset = static_cast<int>(bytecodes()->size());
  const BytecodeSourceInfo& source_info = node->source_info();
  if (source_info.is_valid()) {
    source_position_table_builder()->AddPosition(
        bytecode_offset, SourcePosition(source_info.source_position()),
        source_info.is_statement());
  }
}

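// Marks the current basic block as terminated when an unconditional exit
// (return, throw, re-throw, abort, unconditional jump, or generator suspend)
// is written; the Write* methods then drop anything emitted before the next
// label, loop header, jump-table entry, or handler target starts a new block.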
void BytecodeArrayWriter::UpdateExitSeenInBlock(Bytecode bytecode) {
  switch (bytecode) {
    case Bytecode::kReturn:
    case Bytecode::kThrow:
    case Bytecode::kReThrow:
    case Bytecode::kAbort:
    case Bytecode::kJump:
    case Bytecode::kJumpConstant:
    case Bytecode::kSuspendGenerator:
      exit_seen_in_block_ = true;
      break;
    default:
      break;
  }
}

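// Example of the elision below (a sketch, assuming the usual accumulator
// semantics of these bytecodes): in the sequence `LdaTrue; LdaSmi [42]`,
// LdaTrue only writes the accumulator and has no other effect, and LdaSmi
// overwrites the accumulator without reading it, so LdaTrue's bytes are
// truncated from the stream and its source position (if any) is
// transferred to LdaSmi.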
void BytecodeArrayWriter::MaybeElideLastBytecode(Bytecode next_bytecode,
                                                 bool has_source_info) {
  if (!elide_noneffectful_bytecodes_) return;

  // If the last bytecode loaded the accumulator without any external effect,
  // and the next bytecode clobbers this load without reading the accumulator,
  // then the previous bytecode can be elided as it has no effect.
  if (Bytecodes::IsAccumulatorLoadWithoutEffects(last_bytecode_) &&
      Bytecodes::GetAccumulatorUse(next_bytecode) == AccumulatorUse::kWrite &&
      (!last_bytecode_had_source_info_ || !has_source_info)) {
    DCHECK_GT(bytecodes()->size(), last_bytecode_offset_);
    bytecodes()->resize(last_bytecode_offset_);
    // If the last bytecode had source info we will transfer the source info
    // to this bytecode.
    has_source_info |= last_bytecode_had_source_info_;
  }
  last_bytecode_ = next_bytecode;
  last_bytecode_had_source_info_ = has_source_info;
  last_bytecode_offset_ = bytecodes()->size();
}

void BytecodeArrayWriter::InvalidateLastBytecode() {
  last_bytecode_ = Bytecode::kIllegal;
}

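// Encodes a node as raw bytes: an optional scaling prefix (kWide or
// kExtraWide), the bytecode byte itself, then each operand at the width
// implied by the operand scale, copied in host byte order. For example
// (a sketch), `LdaSmi [1000]` needs a 16-bit immediate and would be laid
// out as [kWide][kLdaSmi][two operand bytes].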
void BytecodeArrayWriter::EmitBytecode(const BytecodeNode* const node) {
  DCHECK_NE(node->bytecode(), Bytecode::kIllegal);

  Bytecode bytecode = node->bytecode();
  OperandScale operand_scale = node->operand_scale();

  if (operand_scale != OperandScale::kSingle) {
    Bytecode prefix = Bytecodes::OperandScaleToPrefixBytecode(operand_scale);
    bytecodes()->push_back(Bytecodes::ToByte(prefix));
  }
  bytecodes()->push_back(Bytecodes::ToByte(bytecode));

  const uint32_t* const operands = node->operands();
  const int operand_count = node->operand_count();
  const OperandSize* operand_sizes =
      Bytecodes::GetOperandSizes(bytecode, operand_scale);
  for (int i = 0; i < operand_count; ++i) {
    switch (operand_sizes[i]) {
      case OperandSize::kNone:
        UNREACHABLE();
        break;
      case OperandSize::kByte:
        bytecodes()->push_back(static_cast<uint8_t>(operands[i]));
        break;
      case OperandSize::kShort: {
        uint16_t operand = static_cast<uint16_t>(operands[i]);
        const uint8_t* raw_operand = reinterpret_cast<const uint8_t*>(&operand);
        bytecodes()->push_back(raw_operand[0]);
        bytecodes()->push_back(raw_operand[1]);
        break;
      }
      case OperandSize::kQuad: {
        const uint8_t* raw_operand =
            reinterpret_cast<const uint8_t*>(&operands[i]);
        bytecodes()->push_back(raw_operand[0]);
        bytecodes()->push_back(raw_operand[1]);
        bytecodes()->push_back(raw_operand[2]);
        bytecodes()->push_back(raw_operand[3]);
        break;
      }
    }
  }
}

// Returns the constant-pool variant of |jump_bytecode|, used when a forward
// jump's delta does not fit in the reserved immediate operand.
Bytecode GetJumpWithConstantOperand(Bytecode jump_bytecode) {
  switch (jump_bytecode) {
    case Bytecode::kJump:
      return Bytecode::kJumpConstant;
    case Bytecode::kJumpIfTrue:
      return Bytecode::kJumpIfTrueConstant;
    case Bytecode::kJumpIfFalse:
      return Bytecode::kJumpIfFalseConstant;
    case Bytecode::kJumpIfToBooleanTrue:
      return Bytecode::kJumpIfToBooleanTrueConstant;
    case Bytecode::kJumpIfToBooleanFalse:
      return Bytecode::kJumpIfToBooleanFalseConstant;
    case Bytecode::kJumpIfNull:
      return Bytecode::kJumpIfNullConstant;
    case Bytecode::kJumpIfNotNull:
      return Bytecode::kJumpIfNotNullConstant;
    case Bytecode::kJumpIfUndefined:
      return Bytecode::kJumpIfUndefinedConstant;
    case Bytecode::kJumpIfNotUndefined:
      return Bytecode::kJumpIfNotUndefinedConstant;
    case Bytecode::kJumpIfUndefinedOrNull:
      return Bytecode::kJumpIfUndefinedOrNullConstant;
    case Bytecode::kJumpIfJSReceiver:
      return Bytecode::kJumpIfJSReceiverConstant;
    default:
      UNREACHABLE();
  }
}

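// The PatchJumpWith*BitOperand helpers below resolve a forward jump once its
// target is known. For the 8- and 16-bit cases: if the delta fits the
// reserved operand width, the constant-pool reservation is discarded and the
// delta is written in place; otherwise the reservation is committed, the
// jump is rewritten to its *Constant form, and the operand becomes the pool
// index. A 32-bit operand always fits, so that case only writes the delta.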
void BytecodeArrayWriter::PatchJumpWith8BitOperand(size_t jump_location,
                                                   int delta) {
  Bytecode jump_bytecode = Bytecodes::FromByte(bytecodes()->at(jump_location));
  DCHECK(Bytecodes::IsForwardJump(jump_bytecode));
  DCHECK(Bytecodes::IsJumpImmediate(jump_bytecode));
  DCHECK_EQ(Bytecodes::GetOperandType(jump_bytecode, 0), OperandType::kUImm);
  DCHECK_GT(delta, 0);
  size_t operand_location = jump_location + 1;
  DCHECK_EQ(bytecodes()->at(operand_location), k8BitJumpPlaceholder);
  if (Bytecodes::ScaleForUnsignedOperand(delta) == OperandScale::kSingle) {
    // The jump fits within the range of a UImm8 operand, so cancel
    // the reservation and jump directly.
    constant_array_builder()->DiscardReservedEntry(OperandSize::kByte);
    bytecodes()->at(operand_location) = static_cast<uint8_t>(delta);
  } else {
    // The jump does not fit within the range of a UImm8 operand, so
    // commit the reservation, putting the offset into the constant pool,
    // and update the jump instruction and operand.
    size_t entry = constant_array_builder()->CommitReservedEntry(
        OperandSize::kByte, Smi::FromInt(delta));
    DCHECK_EQ(Bytecodes::SizeForUnsignedOperand(static_cast<uint32_t>(entry)),
              OperandSize::kByte);
    jump_bytecode = GetJumpWithConstantOperand(jump_bytecode);
    bytecodes()->at(jump_location) = Bytecodes::ToByte(jump_bytecode);
    bytecodes()->at(operand_location) = static_cast<uint8_t>(entry);
  }
}

void BytecodeArrayWriter::PatchJumpWith16BitOperand(size_t jump_location,
                                                    int delta) {
  Bytecode jump_bytecode = Bytecodes::FromByte(bytecodes()->at(jump_location));
  DCHECK(Bytecodes::IsForwardJump(jump_bytecode));
  DCHECK(Bytecodes::IsJumpImmediate(jump_bytecode));
  DCHECK_EQ(Bytecodes::GetOperandType(jump_bytecode, 0), OperandType::kUImm);
  DCHECK_GT(delta, 0);
  size_t operand_location = jump_location + 1;
  uint8_t operand_bytes[2];
  if (Bytecodes::ScaleForUnsignedOperand(delta) <= OperandScale::kDouble) {
    // The jump fits within the range of a UImm16 operand, so cancel
    // the reservation and jump directly.
    constant_array_builder()->DiscardReservedEntry(OperandSize::kShort);
    base::WriteUnalignedValue<uint16_t>(
        reinterpret_cast<Address>(operand_bytes), static_cast<uint16_t>(delta));
  } else {
    // The jump does not fit within the range of a UImm16 operand, so
    // commit the reservation, putting the offset into the constant pool,
    // and update the jump instruction and operand.
    size_t entry = constant_array_builder()->CommitReservedEntry(
        OperandSize::kShort, Smi::FromInt(delta));
    jump_bytecode = GetJumpWithConstantOperand(jump_bytecode);
    bytecodes()->at(jump_location) = Bytecodes::ToByte(jump_bytecode);
    base::WriteUnalignedValue<uint16_t>(
        reinterpret_cast<Address>(operand_bytes), static_cast<uint16_t>(entry));
  }
  DCHECK(bytecodes()->at(operand_location) == k8BitJumpPlaceholder &&
         bytecodes()->at(operand_location + 1) == k8BitJumpPlaceholder);
  bytecodes()->at(operand_location++) = operand_bytes[0];
  bytecodes()->at(operand_location) = operand_bytes[1];
}

void BytecodeArrayWriter::PatchJumpWith32BitOperand(size_t jump_location,
                                                    int delta) {
  DCHECK(Bytecodes::IsJumpImmediate(
      Bytecodes::FromByte(bytecodes()->at(jump_location))));
  constant_array_builder()->DiscardReservedEntry(OperandSize::kQuad);
  uint8_t operand_bytes[4];
  base::WriteUnalignedValue<uint32_t>(reinterpret_cast<Address>(operand_bytes),
                                      static_cast<uint32_t>(delta));
  size_t operand_location = jump_location + 1;
  DCHECK(bytecodes()->at(operand_location) == k8BitJumpPlaceholder &&
         bytecodes()->at(operand_location + 1) == k8BitJumpPlaceholder &&
         bytecodes()->at(operand_location + 2) == k8BitJumpPlaceholder &&
         bytecodes()->at(operand_location + 3) == k8BitJumpPlaceholder);
  bytecodes()->at(operand_location++) = operand_bytes[0];
  bytecodes()->at(operand_location++) = operand_bytes[1];
  bytecodes()->at(operand_location++) = operand_bytes[2];
  bytecodes()->at(operand_location) = operand_bytes[3];
}

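// A sketch of the prefix handling below: for a scaled forward jump,
// |jump_location| points at the kWide/kExtraWide prefix byte. The stored
// delta is taken relative to the jump bytecode that follows the prefix
// (hence the delta -= 1), and the operand patch is applied starting at
// jump_location + prefix_offset.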
void BytecodeArrayWriter::PatchJump(size_t jump_target, size_t jump_location) {
  Bytecode jump_bytecode = Bytecodes::FromByte(bytecodes()->at(jump_location));
  int delta = static_cast<int>(jump_target - jump_location);
  int prefix_offset = 0;
  OperandScale operand_scale = OperandScale::kSingle;
  if (Bytecodes::IsPrefixScalingBytecode(jump_bytecode)) {
    // If a prefix scaling bytecode was emitted, the target offset is one
    // less than in the case with no prefix scaling bytecode.
    delta -= 1;
    prefix_offset = 1;
    operand_scale = Bytecodes::PrefixBytecodeToOperandScale(jump_bytecode);
    jump_bytecode =
        Bytecodes::FromByte(bytecodes()->at(jump_location + prefix_offset));
  }

  DCHECK(Bytecodes::IsJump(jump_bytecode));
  switch (operand_scale) {
    case OperandScale::kSingle:
      PatchJumpWith8BitOperand(jump_location, delta);
      break;
    case OperandScale::kDouble:
      PatchJumpWith16BitOperand(jump_location + prefix_offset, delta);
      break;
    case OperandScale::kQuadruple:
      PatchJumpWith32BitOperand(jump_location + prefix_offset, delta);
      break;
    default:
      UNREACHABLE();
  }
  unbound_jumps_--;
}

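// Backward jumps already know their target, so the delta is final at emit
// time. Note the mirror image of PatchJump's adjustment: when the delta
// needs a wide operand, the scaling prefix pushes the JumpLoop bytecode one
// byte further from the loop header, so the stored delta grows by one.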
void BytecodeArrayWriter::EmitJumpLoop(BytecodeNode* node,
                                       BytecodeLoopHeader* loop_header) {
  DCHECK_EQ(node->bytecode(), Bytecode::kJumpLoop);
  DCHECK_EQ(0u, node->operand(0));

  size_t current_offset = bytecodes()->size();

  CHECK_GE(current_offset, loop_header->offset());
  CHECK_LE(current_offset, static_cast<size_t>(kMaxUInt32));
  // The loop header has already been bound, so this is a backwards jump.
  uint32_t delta =
      static_cast<uint32_t>(current_offset - loop_header->offset());
  OperandScale operand_scale = Bytecodes::ScaleForUnsignedOperand(delta);
  if (operand_scale > OperandScale::kSingle) {
    // Adjust for the scaling byte prefix of the wide jump offset.
    delta += 1;
  }
  node->update_operand0(delta);
  EmitBytecode(node);
}

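// A sketch of the reservation dance, assuming ConstantArrayBuilder sizes
// reservations by the pool's current occupancy: CreateReservedEntry()
// returns the operand width a pool index would need today, the jump is
// emitted with a placeholder of that width, and PatchJump later either
// writes the real delta or commits the reserved pool slot.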
void BytecodeArrayWriter::EmitJump(BytecodeNode* node, BytecodeLabel* label) {
  DCHECK(Bytecodes::IsForwardJump(node->bytecode()));
  DCHECK_EQ(0u, node->operand(0));

  size_t current_offset = bytecodes()->size();

  // The label has not yet been bound, so this is a forward reference.
  // We create a reservation in the constant pool so that the jump can
  // be patched when the label is bound: the reservation fixes the
  // maximum size of the operand, so the jump can be emitted into the
  // bytecode stream with space for the operand.
  unbound_jumps_++;
  label->set_referrer(current_offset);
  OperandSize reserved_operand_size =
      constant_array_builder()->CreateReservedEntry();
  DCHECK_NE(Bytecode::kJumpLoop, node->bytecode());
  switch (reserved_operand_size) {
    case OperandSize::kNone:
      UNREACHABLE();
    case OperandSize::kByte:
      node->update_operand0(k8BitJumpPlaceholder);
      break;
    case OperandSize::kShort:
      node->update_operand0(k16BitJumpPlaceholder);
      break;
    case OperandSize::kQuad:
      node->update_operand0(k32BitJumpPlaceholder);
      break;
  }
  EmitBytecode(node);
}

void BytecodeArrayWriter::EmitSwitch(BytecodeNode* node,
                                     BytecodeJumpTable* jump_table) {
  DCHECK(Bytecodes::IsSwitch(node->bytecode()));

  size_t current_offset = bytecodes()->size();
  if (node->operand_scale() > OperandScale::kSingle) {
    // Adjust for scaling byte prefix.
    current_offset += 1;
  }
  jump_table->set_switch_bytecode_offset(current_offset);

  EmitBytecode(node);
}

}  // namespace interpreter
}  // namespace internal
}  // namespace v8