1 /*
2 * Copyright (C) 2009 Apple Inc. All rights reserved.
3 * Copyright (C) 2010 Patrick Gansterer <paroga@paroga.com>
4 *
5 * Redistribution and use in source and binary forms, with or without
6 * modification, are permitted provided that the following conditions
7 * are met:
8 * 1. Redistributions of source code must retain the above copyright
9 * notice, this list of conditions and the following disclaimer.
10 * 2. Redistributions in binary form must reproduce the above copyright
11 * notice, this list of conditions and the following disclaimer in the
12 * documentation and/or other materials provided with the distribution.
13 *
14 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
15 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
16 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
17 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
18 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
19 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
20 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
21 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
22 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
23 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
24 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
25 */
26
27 #include "config.h"
28 #if ENABLE(JIT)
29 #include "JIT.h"
30
31 #include "Arguments.h"
32 #include "JITInlineMethods.h"
33 #include "JITStubCall.h"
34 #include "JSArray.h"
35 #include "JSCell.h"
36 #include "JSFunction.h"
37 #include "JSPropertyNameIterator.h"
38 #include "LinkBuffer.h"
39
40 namespace JSC {
41
42 #if USE(JSVALUE64)
43
44 void JIT::privateCompileCTIMachineTrampolines(RefPtr<ExecutablePool>* executablePool, JSGlobalData* globalData, TrampolineStructure* trampolines)
45 {
46 #if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
47 // (2) The second function provides fast property access for string length
48 Label stringLengthBegin = align();
49
50     // Check that regT0 is a string
51 Jump string_failureCases1 = emitJumpIfNotJSCell(regT0);
52 Jump string_failureCases2 = branchPtr(NotEqual, Address(regT0), TrustedImmPtr(m_globalData->jsStringVPtr));
53
54     // Checks out okay! Get the length from the UString.
55 load32(Address(regT0, OBJECT_OFFSETOF(JSString, m_length)), regT0);
56
57 Jump string_failureCases3 = branch32(LessThan, regT0, TrustedImm32(0));
58
59     // regT0 contains a 64-bit value (positive, zero-extended), so we don't need to sign extend here.
60 emitFastArithIntToImmNoCheck(regT0, regT0);
61
62 ret();
63 #endif
64
65 // (3) Trampolines for the slow cases of op_call / op_call_eval / op_construct.
66 COMPILE_ASSERT(sizeof(CodeType) == 4, CodeTypeEnumMustBe32Bit);
67
68 // VirtualCallLink Trampoline
69 // regT0 holds callee, regT1 holds argCount. regT2 will hold the FunctionExecutable.
70 JumpList callLinkFailures;
71 Label virtualCallLinkBegin = align();
72 compileOpCallInitializeCallFrame();
73 preserveReturnAddressAfterCall(regT3);
74 emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);
75 restoreArgumentReference();
76 Call callLazyLinkCall = call();
77 callLinkFailures.append(branchTestPtr(Zero, regT0));
78 restoreReturnAddressBeforeReturn(regT3);
79 emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT1);
80 jump(regT0);
81
82 // VirtualConstructLink Trampoline
83 // regT0 holds callee, regT1 holds argCount. regT2 will hold the FunctionExecutable.
84 Label virtualConstructLinkBegin = align();
85 compileOpCallInitializeCallFrame();
86 preserveReturnAddressAfterCall(regT3);
87 emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);
88 restoreArgumentReference();
89 Call callLazyLinkConstruct = call();
90 callLinkFailures.append(branchTestPtr(Zero, regT0));
91 restoreReturnAddressBeforeReturn(regT3);
92 emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT1);
93 jump(regT0);
94
95 // VirtualCall Trampoline
96 // regT0 holds callee, regT1 holds argCount. regT2 will hold the FunctionExecutable.
97 Label virtualCallBegin = align();
98 compileOpCallInitializeCallFrame();
99
100 loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
101
102 Jump hasCodeBlock3 = branch32(GreaterThanOrEqual, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParametersForCall)), TrustedImm32(0));
103 preserveReturnAddressAfterCall(regT3);
104 restoreArgumentReference();
105 Call callCompileCall = call();
106 callLinkFailures.append(branchTestPtr(Zero, regT0));
107 emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT1);
108 restoreReturnAddressBeforeReturn(regT3);
109 loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
110 hasCodeBlock3.link(this);
111
112 loadPtr(Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_jitCodeForCallWithArityCheck)), regT0);
113 jump(regT0);
114
115 // VirtualConstruct Trampoline
116 // regT0 holds callee, regT1 holds argCount. regT2 will hold the FunctionExecutable.
117 Label virtualConstructBegin = align();
118 compileOpCallInitializeCallFrame();
119
120 loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
121
122 Jump hasCodeBlock4 = branch32(GreaterThanOrEqual, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParametersForConstruct)), TrustedImm32(0));
123 preserveReturnAddressAfterCall(regT3);
124 restoreArgumentReference();
125 Call callCompileConstruct = call();
126 callLinkFailures.append(branchTestPtr(Zero, regT0));
127 emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT1);
128 restoreReturnAddressBeforeReturn(regT3);
129 loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
130 hasCodeBlock4.link(this);
131
132 loadPtr(Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_jitCodeForConstructWithArityCheck)), regT0);
133 jump(regT0);
134
135     // If the parser fails we want to be able to keep going,
136     // so we handle this as a parse failure.
137 callLinkFailures.link(this);
138 emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);
139 emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);
140 restoreReturnAddressBeforeReturn(regT1);
141 move(TrustedImmPtr(&globalData->exceptionLocation), regT2);
142 storePtr(regT1, regT2);
143 poke(callFrameRegister, 1 + OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof(void*));
144 poke(TrustedImmPtr(FunctionPtr(ctiVMThrowTrampoline).value()));
145 ret();
146
147 // NativeCall Trampoline
148 Label nativeCallThunk = privateCompileCTINativeCall(globalData);
149 Label nativeConstructThunk = privateCompileCTINativeCall(globalData, true);
150
151 #if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
152 Call string_failureCases1Call = makeTailRecursiveCall(string_failureCases1);
153 Call string_failureCases2Call = makeTailRecursiveCall(string_failureCases2);
154 Call string_failureCases3Call = makeTailRecursiveCall(string_failureCases3);
155 #endif
156
157     // All trampolines constructed! Copy the code, link up the calls, and set the pointers on the Machine object.
158 LinkBuffer patchBuffer(this, m_globalData->executableAllocator);
159
160 #if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
161 patchBuffer.link(string_failureCases1Call, FunctionPtr(cti_op_get_by_id_string_fail));
162 patchBuffer.link(string_failureCases2Call, FunctionPtr(cti_op_get_by_id_string_fail));
163 patchBuffer.link(string_failureCases3Call, FunctionPtr(cti_op_get_by_id_string_fail));
164 #endif
165 #if ENABLE(JIT_OPTIMIZE_CALL)
166 patchBuffer.link(callLazyLinkCall, FunctionPtr(cti_vm_lazyLinkCall));
167 patchBuffer.link(callLazyLinkConstruct, FunctionPtr(cti_vm_lazyLinkConstruct));
168 #endif
169 patchBuffer.link(callCompileCall, FunctionPtr(cti_op_call_jitCompile));
170 patchBuffer.link(callCompileConstruct, FunctionPtr(cti_op_construct_jitCompile));
171
172 CodeRef finalCode = patchBuffer.finalizeCode();
173 *executablePool = finalCode.m_executablePool;
174
175 trampolines->ctiVirtualCallLink = patchBuffer.trampolineAt(virtualCallLinkBegin);
176 trampolines->ctiVirtualConstructLink = patchBuffer.trampolineAt(virtualConstructLinkBegin);
177 trampolines->ctiVirtualCall = patchBuffer.trampolineAt(virtualCallBegin);
178 trampolines->ctiVirtualConstruct = patchBuffer.trampolineAt(virtualConstructBegin);
179 trampolines->ctiNativeCall = patchBuffer.trampolineAt(nativeCallThunk);
180 trampolines->ctiNativeConstruct = patchBuffer.trampolineAt(nativeConstructThunk);
181 #if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
182 trampolines->ctiStringLengthTrampoline = patchBuffer.trampolineAt(stringLengthBegin);
183 #endif
184 }
185
186 JIT::Label JIT::privateCompileCTINativeCall(JSGlobalData* globalData, bool isConstruct)
187 {
188 int executableOffsetToFunction = isConstruct ? OBJECT_OFFSETOF(NativeExecutable, m_constructor) : OBJECT_OFFSETOF(NativeExecutable, m_function);
189
190 Label nativeCallThunk = align();
191
192 emitPutImmediateToCallFrameHeader(0, RegisterFile::CodeBlock);
193
194 #if CPU(X86_64)
195 // Load caller frame's scope chain into this callframe so that whatever we call can
196 // get to its global data.
197 emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT0);
198 emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT0);
199 emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);
200
201 peek(regT1);
202 emitPutToCallFrameHeader(regT1, RegisterFile::ReturnPC);
203
204 // Calling convention: f(edi, esi, edx, ecx, ...);
205 // Host function signature: f(ExecState*);
206 move(callFrameRegister, X86Registers::edi);
207
208 subPtr(TrustedImm32(16 - sizeof(void*)), stackPointerRegister); // Align stack after call.
209
210 emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, X86Registers::esi);
211 loadPtr(Address(X86Registers::esi, OBJECT_OFFSETOF(JSFunction, m_executable)), X86Registers::r9);
212 move(regT0, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
213 call(Address(X86Registers::r9, executableOffsetToFunction));
214
215 addPtr(TrustedImm32(16 - sizeof(void*)), stackPointerRegister);
216
217 #elif CPU(ARM)
218 // Load caller frame's scope chain into this callframe so that whatever we call can
219 // get to its global data.
220 emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT2);
221 emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT2);
222 emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);
223
224 preserveReturnAddressAfterCall(regT3); // Callee preserved
225 emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);
226
227 // Calling convention: f(r0 == regT0, r1 == regT1, ...);
228 // Host function signature: f(ExecState*);
229 move(callFrameRegister, ARMRegisters::r0);
230
231 emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, ARMRegisters::r1);
232 move(regT2, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
233 loadPtr(Address(ARMRegisters::r1, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
234 call(Address(regT2, executableOffsetToFunction));
235
236 restoreReturnAddressBeforeReturn(regT3);
237
238 #elif CPU(MIPS)
239 // Load caller frame's scope chain into this callframe so that whatever we call can
240 // get to its global data.
241 emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT0);
242 emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT0);
243 emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);
244
245 preserveReturnAddressAfterCall(regT3); // Callee preserved
246 emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);
247
248 // Calling convention: f(a0, a1, a2, a3);
249 // Host function signature: f(ExecState*);
250
251 // Allocate stack space for 16 bytes (8-byte aligned)
252 // 16 bytes (unused) for 4 arguments
253 subPtr(TrustedImm32(16), stackPointerRegister);
254
255 // Setup arg0
256 move(callFrameRegister, MIPSRegisters::a0);
257
258 // Call
259 emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, MIPSRegisters::a2);
260 loadPtr(Address(MIPSRegisters::a2, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
261 move(regT0, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
262 call(Address(regT2, executableOffsetToFunction));
263
264 // Restore stack space
265 addPtr(TrustedImm32(16), stackPointerRegister);
266
267 restoreReturnAddressBeforeReturn(regT3);
268
269 #elif ENABLE(JIT_OPTIMIZE_NATIVE_CALL)
270 #error "JIT_OPTIMIZE_NATIVE_CALL not yet supported on this platform."
271 #else
272 UNUSED_PARAM(executableOffsetToFunction);
273 breakpoint();
274 #endif
275
276 // Check for an exception
277 loadPtr(&(globalData->exception), regT2);
278 Jump exceptionHandler = branchTestPtr(NonZero, regT2);
279
280 // Return.
281 ret();
282
283 // Handle an exception
284 exceptionHandler.link(this);
285
286 // Grab the return address.
287 preserveReturnAddressAfterCall(regT1);
288
289 move(TrustedImmPtr(&globalData->exceptionLocation), regT2);
290 storePtr(regT1, regT2);
291 poke(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof(void*));
292
293 // Set the return address.
294 move(TrustedImmPtr(FunctionPtr(ctiVMThrowTrampoline).value()), regT1);
295 restoreReturnAddressBeforeReturn(regT1);
296
297 ret();
298
299 return nativeCallThunk;
300 }
301
302 JIT::CodePtr JIT::privateCompileCTINativeCall(PassRefPtr<ExecutablePool>, JSGlobalData* globalData, NativeFunction)
303 {
304 return globalData->jitStubs->ctiNativeCall();
305 }
306
307 void JIT::emit_op_mov(Instruction* currentInstruction)
308 {
309 int dst = currentInstruction[1].u.operand;
310 int src = currentInstruction[2].u.operand;
311
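    // For a constant source, store the encoded constant straight into the destination slot,
    // and drop any stale result-register mapping for dst.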
312 if (m_codeBlock->isConstantRegisterIndex(src)) {
313 storePtr(ImmPtr(JSValue::encode(getConstantOperand(src))), Address(callFrameRegister, dst * sizeof(Register)));
314 if (dst == m_lastResultBytecodeRegister)
315 killLastResultRegister();
316 } else if ((src == m_lastResultBytecodeRegister) || (dst == m_lastResultBytecodeRegister)) {
317         // If either the src or dst is the cached register, go through the
318         // get/put registers to make sure we track this correctly.
319 emitGetVirtualRegister(src, regT0);
320 emitPutVirtualRegister(dst);
321 } else {
322 // Perform the copy via regT1; do not disturb any mapping in regT0.
323 loadPtr(Address(callFrameRegister, src * sizeof(Register)), regT1);
324 storePtr(regT1, Address(callFrameRegister, dst * sizeof(Register)));
325 }
326 }
327
328 void JIT::emit_op_end(Instruction* currentInstruction)
329 {
330 ASSERT(returnValueRegister != callFrameRegister);
331 emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueRegister);
332 restoreReturnAddressBeforeReturn(Address(callFrameRegister, RegisterFile::ReturnPC * static_cast<int>(sizeof(Register))));
333 ret();
334 }
335
336 void JIT::emit_op_jmp(Instruction* currentInstruction)
337 {
338 unsigned target = currentInstruction[1].u.operand;
339 addJump(jump(), target);
340 }
341
342 void JIT::emit_op_loop_if_lesseq(Instruction* currentInstruction)
343 {
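    // Fast path: compare op1 <= op2 as immediate integers and jump to the loop target;
    // non-integer operands bail out to the slow case.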
344 emitTimeoutCheck();
345
346 unsigned op1 = currentInstruction[1].u.operand;
347 unsigned op2 = currentInstruction[2].u.operand;
348 unsigned target = currentInstruction[3].u.operand;
349 if (isOperandConstantImmediateInt(op2)) {
350 emitGetVirtualRegister(op1, regT0);
351 emitJumpSlowCaseIfNotImmediateInteger(regT0);
352 int32_t op2imm = getConstantOperandImmediateInt(op2);
353 addJump(branch32(LessThanOrEqual, regT0, Imm32(op2imm)), target);
354 } else {
355 emitGetVirtualRegisters(op1, regT0, op2, regT1);
356 emitJumpSlowCaseIfNotImmediateInteger(regT0);
357 emitJumpSlowCaseIfNotImmediateInteger(regT1);
358 addJump(branch32(LessThanOrEqual, regT0, regT1), target);
359 }
360 }
361
362 void JIT::emit_op_new_object(Instruction* currentInstruction)
363 {
364 JITStubCall(this, cti_op_new_object).call(currentInstruction[1].u.operand);
365 }
366
367 void JIT::emit_op_check_has_instance(Instruction* currentInstruction)
368 {
369 unsigned baseVal = currentInstruction[1].u.operand;
370
371 emitGetVirtualRegister(baseVal, regT0);
372
373 // Check that baseVal is a cell.
374 emitJumpSlowCaseIfNotJSCell(regT0, baseVal);
375
376 // Check that baseVal 'ImplementsHasInstance'.
377 loadPtr(Address(regT0, JSCell::structureOffset()), regT0);
378 addSlowCase(branchTest8(Zero, Address(regT0, Structure::typeInfoFlagsOffset()), TrustedImm32(ImplementsHasInstance)));
379 }
380
381 void JIT::emit_op_instanceof(Instruction* currentInstruction)
382 {
383 unsigned dst = currentInstruction[1].u.operand;
384 unsigned value = currentInstruction[2].u.operand;
385 unsigned baseVal = currentInstruction[3].u.operand;
386 unsigned proto = currentInstruction[4].u.operand;
387
388 // Load the operands (baseVal, proto, and value respectively) into registers.
389 // We use regT0 for baseVal since we will be done with this first, and we can then use it for the result.
390 emitGetVirtualRegister(value, regT2);
391 emitGetVirtualRegister(baseVal, regT0);
392 emitGetVirtualRegister(proto, regT1);
393
394     // Check that value and proto are cells. baseVal must be a cell - this is checked by op_check_has_instance.
395 emitJumpSlowCaseIfNotJSCell(regT2, value);
396 emitJumpSlowCaseIfNotJSCell(regT1, proto);
397
398 // Check that prototype is an object
399 loadPtr(Address(regT1, JSCell::structureOffset()), regT3);
400 addSlowCase(branch8(NotEqual, Address(regT3, Structure::typeInfoTypeOffset()), TrustedImm32(ObjectType)));
401
402     // FIXME: this check is only needed because the JSC API allows HasInstance to be overridden; we should deprecate this.
403 // Check that baseVal 'ImplementsDefaultHasInstance'.
404 loadPtr(Address(regT0, JSCell::structureOffset()), regT0);
405 addSlowCase(branchTest8(Zero, Address(regT0, Structure::typeInfoFlagsOffset()), TrustedImm32(ImplementsDefaultHasInstance)));
406
407 // Optimistically load the result true, and start looping.
408 // Initially, regT1 still contains proto and regT2 still contains value.
409 // As we loop regT2 will be updated with its prototype, recursively walking the prototype chain.
410 move(TrustedImmPtr(JSValue::encode(jsBoolean(true))), regT0);
411 Label loop(this);
412
413 // Load the prototype of the object in regT2. If this is equal to regT1 - WIN!
414 // Otherwise, check if we've hit null - if we have then drop out of the loop, if not go again.
415 loadPtr(Address(regT2, JSCell::structureOffset()), regT2);
416 loadPtr(Address(regT2, Structure::prototypeOffset()), regT2);
417 Jump isInstance = branchPtr(Equal, regT2, regT1);
418 emitJumpIfJSCell(regT2).linkTo(loop, this);
419
420 // We get here either by dropping out of the loop, or if value was not an Object. Result is false.
421 move(TrustedImmPtr(JSValue::encode(jsBoolean(false))), regT0);
422
423 // isInstance jumps right down to here, to skip setting the result to false (it has already set true).
424 isInstance.link(this);
425 emitPutVirtualRegister(dst);
426 }
427
428 void JIT::emit_op_call(Instruction* currentInstruction)
429 {
430 compileOpCall(op_call, currentInstruction, m_callLinkInfoIndex++);
431 }
432
433 void JIT::emit_op_call_eval(Instruction* currentInstruction)
434 {
435 compileOpCall(op_call_eval, currentInstruction, m_callLinkInfoIndex++);
436 }
437
438 void JIT::emit_op_call_varargs(Instruction* currentInstruction)
439 {
440 compileOpCallVarargs(currentInstruction);
441 }
442
443 void JIT::emit_op_construct(Instruction* currentInstruction)
444 {
445 compileOpCall(op_construct, currentInstruction, m_callLinkInfoIndex++);
446 }
447
448 void JIT::emit_op_get_global_var(Instruction* currentInstruction)
449 {
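    // Read the global variable directly out of the global object's register array.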
450 JSVariableObject* globalObject = m_codeBlock->globalObject();
451 loadPtr(&globalObject->m_registers, regT0);
452 loadPtr(Address(regT0, currentInstruction[2].u.operand * sizeof(Register)), regT0);
453 emitPutVirtualRegister(currentInstruction[1].u.operand);
454 }
455
456 void JIT::emit_op_put_global_var(Instruction* currentInstruction)
457 {
458 emitGetVirtualRegister(currentInstruction[2].u.operand, regT1);
459 JSVariableObject* globalObject = m_codeBlock->globalObject();
460 loadPtr(&globalObject->m_registers, regT0);
461 storePtr(regT1, Address(regT0, currentInstruction[1].u.operand * sizeof(Register)));
462 }
463
464 void JIT::emit_op_get_scoped_var(Instruction* currentInstruction)
465 {
466 int skip = currentInstruction[3].u.operand;
467
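    // Walk 'skip' links up the scope chain (skipping a not-yet-created activation at the
    // top level), then read the variable out of the scope object's register array.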
468 emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT0);
469 bool checkTopLevel = m_codeBlock->codeType() == FunctionCode && m_codeBlock->needsFullScopeChain();
470 ASSERT(skip || !checkTopLevel);
471 if (checkTopLevel && skip--) {
472 Jump activationNotCreated;
473 if (checkTopLevel)
474 activationNotCreated = branchTestPtr(Zero, addressFor(m_codeBlock->activationRegister()));
475 loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, next)), regT0);
476 activationNotCreated.link(this);
477 }
478 while (skip--)
479 loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, next)), regT0);
480
481 loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, object)), regT0);
482 loadPtr(Address(regT0, OBJECT_OFFSETOF(JSVariableObject, m_registers)), regT0);
483 loadPtr(Address(regT0, currentInstruction[2].u.operand * sizeof(Register)), regT0);
484 emitPutVirtualRegister(currentInstruction[1].u.operand);
485 }
486
487 void JIT::emit_op_put_scoped_var(Instruction* currentInstruction)
488 {
489 int skip = currentInstruction[2].u.operand;
490
491 emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1);
492 emitGetVirtualRegister(currentInstruction[3].u.operand, regT0);
493 bool checkTopLevel = m_codeBlock->codeType() == FunctionCode && m_codeBlock->needsFullScopeChain();
494 ASSERT(skip || !checkTopLevel);
495 if (checkTopLevel && skip--) {
496 Jump activationNotCreated;
497 if (checkTopLevel)
498 activationNotCreated = branchTestPtr(Zero, addressFor(m_codeBlock->activationRegister()));
499 loadPtr(Address(regT1, OBJECT_OFFSETOF(ScopeChainNode, next)), regT1);
500 activationNotCreated.link(this);
501 }
502 while (skip--)
503 loadPtr(Address(regT1, OBJECT_OFFSETOF(ScopeChainNode, next)), regT1);
504
505 loadPtr(Address(regT1, OBJECT_OFFSETOF(ScopeChainNode, object)), regT1);
506 loadPtr(Address(regT1, OBJECT_OFFSETOF(JSVariableObject, m_registers)), regT1);
507 storePtr(regT0, Address(regT1, currentInstruction[1].u.operand * sizeof(Register)));
508 }
509
510 void JIT::emit_op_tear_off_activation(Instruction* currentInstruction)
511 {
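    // Skip the stub call only if neither the activation nor the arguments object has been created.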
512 unsigned activation = currentInstruction[1].u.operand;
513 unsigned arguments = currentInstruction[2].u.operand;
514 Jump activationCreated = branchTestPtr(NonZero, addressFor(activation));
515 Jump argumentsNotCreated = branchTestPtr(Zero, addressFor(arguments));
516 activationCreated.link(this);
517 JITStubCall stubCall(this, cti_op_tear_off_activation);
518 stubCall.addArgument(activation, regT2);
519 stubCall.addArgument(unmodifiedArgumentsRegister(arguments), regT2);
520 stubCall.call();
521 argumentsNotCreated.link(this);
522 }
523
524 void JIT::emit_op_tear_off_arguments(Instruction* currentInstruction)
525 {
526 unsigned dst = currentInstruction[1].u.operand;
527
528 Jump argsNotCreated = branchTestPtr(Zero, Address(callFrameRegister, sizeof(Register) * (unmodifiedArgumentsRegister(dst))));
529 JITStubCall stubCall(this, cti_op_tear_off_arguments);
530 stubCall.addArgument(unmodifiedArgumentsRegister(dst), regT2);
531 stubCall.call();
532 argsNotCreated.link(this);
533 }
534
535 void JIT::emit_op_ret(Instruction* currentInstruction)
536 {
537 ASSERT(callFrameRegister != regT1);
538 ASSERT(regT1 != returnValueRegister);
539 ASSERT(returnValueRegister != callFrameRegister);
540
541 // Return the result in %eax.
542 emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueRegister);
543
544 // Grab the return address.
545 emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);
546
547 // Restore our caller's "r".
548 emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);
549
550 // Return.
551 restoreReturnAddressBeforeReturn(regT1);
552 ret();
553 }
554
555 void JIT::emit_op_ret_object_or_this(Instruction* currentInstruction)
556 {
557 ASSERT(callFrameRegister != regT1);
558 ASSERT(regT1 != returnValueRegister);
559 ASSERT(returnValueRegister != callFrameRegister);
560
561 // Return the result in %eax.
562 emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueRegister);
563 Jump notJSCell = emitJumpIfNotJSCell(returnValueRegister);
564 loadPtr(Address(returnValueRegister, JSCell::structureOffset()), regT2);
565 Jump notObject = branch8(NotEqual, Address(regT2, Structure::typeInfoTypeOffset()), TrustedImm32(ObjectType));
566
567 // Grab the return address.
568 emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);
569
570 // Restore our caller's "r".
571 emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);
572
573 // Return.
574 restoreReturnAddressBeforeReturn(regT1);
575 ret();
576
577 // Return 'this' in %eax.
578 notJSCell.link(this);
579 notObject.link(this);
580 emitGetVirtualRegister(currentInstruction[2].u.operand, returnValueRegister);
581
582 // Grab the return address.
583 emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);
584
585 // Restore our caller's "r".
586 emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);
587
588 // Return.
589 restoreReturnAddressBeforeReturn(regT1);
590 ret();
591 }
592
593 void JIT::emit_op_new_array(Instruction* currentInstruction)
594 {
595 JITStubCall stubCall(this, cti_op_new_array);
596 stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
597 stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
598 stubCall.call(currentInstruction[1].u.operand);
599 }
600
601 void JIT::emit_op_resolve(Instruction* currentInstruction)
602 {
603 JITStubCall stubCall(this, cti_op_resolve);
604 stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
605 stubCall.call(currentInstruction[1].u.operand);
606 }
607
608 void JIT::emit_op_to_primitive(Instruction* currentInstruction)
609 {
610 int dst = currentInstruction[1].u.operand;
611 int src = currentInstruction[2].u.operand;
612
613 emitGetVirtualRegister(src, regT0);
614
615 Jump isImm = emitJumpIfNotJSCell(regT0);
616 addSlowCase(branchPtr(NotEqual, Address(regT0), TrustedImmPtr(m_globalData->jsStringVPtr)));
617 isImm.link(this);
618
619 if (dst != src)
620 emitPutVirtualRegister(dst);
621
622 }
623
624 void JIT::emit_op_strcat(Instruction* currentInstruction)
625 {
626 JITStubCall stubCall(this, cti_op_strcat);
627 stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
628 stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
629 stubCall.call(currentInstruction[1].u.operand);
630 }
631
632 void JIT::emit_op_resolve_base(Instruction* currentInstruction)
633 {
634 JITStubCall stubCall(this, currentInstruction[3].u.operand ? cti_op_resolve_base_strict_put : cti_op_resolve_base);
635 stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
636 stubCall.call(currentInstruction[1].u.operand);
637 }
638
639 void JIT::emit_op_ensure_property_exists(Instruction* currentInstruction)
640 {
641 JITStubCall stubCall(this, cti_op_ensure_property_exists);
642 stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
643 stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
644 stubCall.call(currentInstruction[1].u.operand);
645 }
646
647 void JIT::emit_op_resolve_skip(Instruction* currentInstruction)
648 {
649 JITStubCall stubCall(this, cti_op_resolve_skip);
650 stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
651 stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
652 stubCall.call(currentInstruction[1].u.operand);
653 }
654
655 void JIT::emit_op_resolve_global(Instruction* currentInstruction, bool)
656 {
657 // Fast case
658 void* globalObject = m_codeBlock->globalObject();
659 unsigned currentIndex = m_globalResolveInfoIndex++;
660 void* structureAddress = &(m_codeBlock->globalResolveInfo(currentIndex).structure);
661 void* offsetAddr = &(m_codeBlock->globalResolveInfo(currentIndex).offset);
662
663 // Check Structure of global object
664 move(TrustedImmPtr(globalObject), regT0);
665 loadPtr(structureAddress, regT1);
666 addSlowCase(branchPtr(NotEqual, regT1, Address(regT0, JSCell::structureOffset()))); // Structures don't match
667
668 // Load cached property
669 // Assume that the global object always uses external storage.
670 loadPtr(Address(regT0, OBJECT_OFFSETOF(JSGlobalObject, m_propertyStorage)), regT0);
671 load32(offsetAddr, regT1);
672 loadPtr(BaseIndex(regT0, regT1, ScalePtr), regT0);
673 emitPutVirtualRegister(currentInstruction[1].u.operand);
674 }
675
676 void JIT::emitSlow_op_resolve_global(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
677 {
678 unsigned dst = currentInstruction[1].u.operand;
679 Identifier* ident = &m_codeBlock->identifier(currentInstruction[2].u.operand);
680
681 unsigned currentIndex = m_globalResolveInfoIndex++;
682
683 linkSlowCase(iter);
684 JITStubCall stubCall(this, cti_op_resolve_global);
685 stubCall.addArgument(TrustedImmPtr(ident));
686 stubCall.addArgument(Imm32(currentIndex));
687 stubCall.addArgument(regT0);
688 stubCall.call(dst);
689 }
690
691 void JIT::emit_op_not(Instruction* currentInstruction)
692 {
693 emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
694
695 // Invert against JSValue(false); if the value was tagged as a boolean, then all bits will be
696 // clear other than the low bit (which will be 0 or 1 for false or true inputs respectively).
697 // Then invert against JSValue(true), which will add the tag back in, and flip the low bit.
698 xorPtr(TrustedImm32(static_cast<int32_t>(ValueFalse)), regT0);
699 addSlowCase(branchTestPtr(NonZero, regT0, TrustedImm32(static_cast<int32_t>(~1))));
700 xorPtr(TrustedImm32(static_cast<int32_t>(ValueTrue)), regT0);
701
702 emitPutVirtualRegister(currentInstruction[1].u.operand);
703 }
704
705 void JIT::emit_op_jfalse(Instruction* currentInstruction)
706 {
707 unsigned target = currentInstruction[2].u.operand;
708 emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
709
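    // Fast cases: the integer zero and false jump to the target; any other integer or true
    // falls through; anything else takes the slow case.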
710 addJump(branchPtr(Equal, regT0, TrustedImmPtr(JSValue::encode(jsNumber(0)))), target);
711 Jump isNonZero = emitJumpIfImmediateInteger(regT0);
712
713 addJump(branchPtr(Equal, regT0, TrustedImmPtr(JSValue::encode(jsBoolean(false)))), target);
714 addSlowCase(branchPtr(NotEqual, regT0, TrustedImmPtr(JSValue::encode(jsBoolean(true)))));
715
716 isNonZero.link(this);
717 }
718
719 void JIT::emit_op_jeq_null(Instruction* currentInstruction)
720 {
721 unsigned src = currentInstruction[1].u.operand;
722 unsigned target = currentInstruction[2].u.operand;
723
724 emitGetVirtualRegister(src, regT0);
725 Jump isImmediate = emitJumpIfNotJSCell(regT0);
726
727 // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
728 loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
729 addJump(branchTest8(NonZero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined)), target);
730 Jump wasNotImmediate = jump();
731
732 // Now handle the immediate cases - undefined & null
733 isImmediate.link(this);
734 andPtr(TrustedImm32(~TagBitUndefined), regT0);
735 addJump(branchPtr(Equal, regT0, TrustedImmPtr(JSValue::encode(jsNull()))), target);
736
737 wasNotImmediate.link(this);
738 }
739 void JIT::emit_op_jneq_null(Instruction* currentInstruction)
740 {
741 unsigned src = currentInstruction[1].u.operand;
742 unsigned target = currentInstruction[2].u.operand;
743
744 emitGetVirtualRegister(src, regT0);
745 Jump isImmediate = emitJumpIfNotJSCell(regT0);
746
747 // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
748 loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
749 addJump(branchTest8(Zero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined)), target);
750 Jump wasNotImmediate = jump();
751
752 // Now handle the immediate cases - undefined & null
753 isImmediate.link(this);
754 andPtr(TrustedImm32(~TagBitUndefined), regT0);
755 addJump(branchPtr(NotEqual, regT0, TrustedImmPtr(JSValue::encode(jsNull()))), target);
756
757 wasNotImmediate.link(this);
758 }
759
760 void JIT::emit_op_jneq_ptr(Instruction* currentInstruction)
761 {
762 unsigned src = currentInstruction[1].u.operand;
763 JSCell* ptr = currentInstruction[2].u.jsCell.get();
764 unsigned target = currentInstruction[3].u.operand;
765
766 emitGetVirtualRegister(src, regT0);
767 addJump(branchPtr(NotEqual, regT0, TrustedImmPtr(JSValue::encode(JSValue(ptr)))), target);
768 }
769
770 void JIT::emit_op_jsr(Instruction* currentInstruction)
771 {
772 int retAddrDst = currentInstruction[1].u.operand;
773 int target = currentInstruction[2].u.operand;
774 DataLabelPtr storeLocation = storePtrWithPatch(TrustedImmPtr(0), Address(callFrameRegister, sizeof(Register) * retAddrDst));
775 addJump(jump(), target);
776 m_jsrSites.append(JSRInfo(storeLocation, label()));
777 killLastResultRegister();
778 }
779
780 void JIT::emit_op_sret(Instruction* currentInstruction)
781 {
782 jump(Address(callFrameRegister, sizeof(Register) * currentInstruction[1].u.operand));
783 killLastResultRegister();
784 }
785
786 void JIT::emit_op_eq(Instruction* currentInstruction)
787 {
788 emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
789 emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
790 compare32(Equal, regT1, regT0, regT0);
791 emitTagAsBoolImmediate(regT0);
792 emitPutVirtualRegister(currentInstruction[1].u.operand);
793 }
794
795 void JIT::emit_op_bitnot(Instruction* currentInstruction)
796 {
797 emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
798 emitJumpSlowCaseIfNotImmediateInteger(regT0);
799 not32(regT0);
800 emitFastArithIntToImmNoCheck(regT0, regT0);
801 emitPutVirtualRegister(currentInstruction[1].u.operand);
802 }
803
804 void JIT::emit_op_resolve_with_base(Instruction* currentInstruction)
805 {
806 JITStubCall stubCall(this, cti_op_resolve_with_base);
807 stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[3].u.operand)));
808 stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
809 stubCall.call(currentInstruction[2].u.operand);
810 }
811
812 void JIT::emit_op_new_func_exp(Instruction* currentInstruction)
813 {
814 JITStubCall stubCall(this, cti_op_new_func_exp);
815 stubCall.addArgument(TrustedImmPtr(m_codeBlock->functionExpr(currentInstruction[2].u.operand)));
816 stubCall.call(currentInstruction[1].u.operand);
817 }
818
819 void JIT::emit_op_jtrue(Instruction* currentInstruction)
820 {
821 unsigned target = currentInstruction[2].u.operand;
822 emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
823
824 Jump isZero = branchPtr(Equal, regT0, TrustedImmPtr(JSValue::encode(jsNumber(0))));
825 addJump(emitJumpIfImmediateInteger(regT0), target);
826
827 addJump(branchPtr(Equal, regT0, TrustedImmPtr(JSValue::encode(jsBoolean(true)))), target);
828 addSlowCase(branchPtr(NotEqual, regT0, TrustedImmPtr(JSValue::encode(jsBoolean(false)))));
829
830 isZero.link(this);
831 }
832
833 void JIT::emit_op_neq(Instruction* currentInstruction)
834 {
835 emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
836 emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
837 compare32(NotEqual, regT1, regT0, regT0);
838 emitTagAsBoolImmediate(regT0);
839
840 emitPutVirtualRegister(currentInstruction[1].u.operand);
841
842 }
843
844 void JIT::emit_op_bitxor(Instruction* currentInstruction)
845 {
846 emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
847 emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
848 xorPtr(regT1, regT0);
849 emitFastArithReTagImmediate(regT0, regT0);
850 emitPutVirtualRegister(currentInstruction[1].u.operand);
851 }
852
853 void JIT::emit_op_bitor(Instruction* currentInstruction)
854 {
855 emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
856 emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
857 orPtr(regT1, regT0);
858 emitPutVirtualRegister(currentInstruction[1].u.operand);
859 }
860
861 void JIT::emit_op_throw(Instruction* currentInstruction)
862 {
863 JITStubCall stubCall(this, cti_op_throw);
864 stubCall.addArgument(currentInstruction[1].u.operand, regT2);
865 stubCall.call();
866 ASSERT(regT0 == returnValueRegister);
867 #ifndef NDEBUG
868     // cti_op_throw always changes its return address,
869     // so this point in the code should never be reached.
870 breakpoint();
871 #endif
872 }
873
874 void JIT::emit_op_get_pnames(Instruction* currentInstruction)
875 {
876 int dst = currentInstruction[1].u.operand;
877 int base = currentInstruction[2].u.operand;
878 int i = currentInstruction[3].u.operand;
879 int size = currentInstruction[4].u.operand;
880 int breakTarget = currentInstruction[5].u.operand;
881
882 JumpList isNotObject;
883
884 emitGetVirtualRegister(base, regT0);
885 if (!m_codeBlock->isKnownNotImmediate(base))
886 isNotObject.append(emitJumpIfNotJSCell(regT0));
887 if (base != m_codeBlock->thisRegister() || m_codeBlock->isStrictMode()) {
888 loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
889 isNotObject.append(branch8(NotEqual, Address(regT2, Structure::typeInfoTypeOffset()), TrustedImm32(ObjectType)));
890 }
891
892 // We could inline the case where you have a valid cache, but
893 // this call doesn't seem to be hot.
894 Label isObject(this);
895 JITStubCall getPnamesStubCall(this, cti_op_get_pnames);
896 getPnamesStubCall.addArgument(regT0);
897 getPnamesStubCall.call(dst);
898 load32(Address(regT0, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStringsSize)), regT3);
899 storePtr(tagTypeNumberRegister, payloadFor(i));
900 store32(TrustedImm32(Int32Tag), intTagFor(size));
901 store32(regT3, intPayloadFor(size));
902 Jump end = jump();
903
904 isNotObject.link(this);
905 move(regT0, regT1);
906 and32(TrustedImm32(~TagBitUndefined), regT1);
907 addJump(branch32(Equal, regT1, TrustedImm32(ValueNull)), breakTarget);
908
909 JITStubCall toObjectStubCall(this, cti_to_object);
910 toObjectStubCall.addArgument(regT0);
911 toObjectStubCall.call(base);
912 jump().linkTo(isObject, this);
913
914 end.link(this);
915 }
916
917 void JIT::emit_op_next_pname(Instruction* currentInstruction)
918 {
919 int dst = currentInstruction[1].u.operand;
920 int base = currentInstruction[2].u.operand;
921 int i = currentInstruction[3].u.operand;
922 int size = currentInstruction[4].u.operand;
923 int it = currentInstruction[5].u.operand;
924 int target = currentInstruction[6].u.operand;
925
926 JumpList callHasProperty;
927
928 Label begin(this);
929 load32(intPayloadFor(i), regT0);
930 Jump end = branch32(Equal, regT0, intPayloadFor(size));
931
932 // Grab key @ i
933 loadPtr(addressFor(it), regT1);
934 loadPtr(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStrings)), regT2);
935
936 loadPtr(BaseIndex(regT2, regT0, TimesEight), regT2);
937
938 emitPutVirtualRegister(dst, regT2);
939
940 // Increment i
941 add32(TrustedImm32(1), regT0);
942 store32(regT0, intPayloadFor(i));
943
944 // Verify that i is valid:
945 emitGetVirtualRegister(base, regT0);
946
947 // Test base's structure
948 loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
949 callHasProperty.append(branchPtr(NotEqual, regT2, Address(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedStructure)))));
950
951 // Test base's prototype chain
952 loadPtr(Address(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedPrototypeChain))), regT3);
953 loadPtr(Address(regT3, OBJECT_OFFSETOF(StructureChain, m_vector)), regT3);
954 addJump(branchTestPtr(Zero, Address(regT3)), target);
955
956 Label checkPrototype(this);
957 loadPtr(Address(regT2, Structure::prototypeOffset()), regT2);
958 callHasProperty.append(emitJumpIfNotJSCell(regT2));
959 loadPtr(Address(regT2, JSCell::structureOffset()), regT2);
960 callHasProperty.append(branchPtr(NotEqual, regT2, Address(regT3)));
961 addPtr(TrustedImm32(sizeof(Structure*)), regT3);
962 branchTestPtr(NonZero, Address(regT3)).linkTo(checkPrototype, this);
963
964 // Continue loop.
965 addJump(jump(), target);
966
967 // Slow case: Ask the object if i is valid.
968 callHasProperty.link(this);
969 emitGetVirtualRegister(dst, regT1);
970 JITStubCall stubCall(this, cti_has_property);
971 stubCall.addArgument(regT0);
972 stubCall.addArgument(regT1);
973 stubCall.call();
974
975 // Test for valid key.
976 addJump(branchTest32(NonZero, regT0), target);
977 jump().linkTo(begin, this);
978
979 // End of loop.
980 end.link(this);
981 }
982
983 void JIT::emit_op_push_scope(Instruction* currentInstruction)
984 {
985 JITStubCall stubCall(this, cti_op_push_scope);
986 stubCall.addArgument(currentInstruction[1].u.operand, regT2);
987 stubCall.call(currentInstruction[1].u.operand);
988 }
989
990 void JIT::emit_op_pop_scope(Instruction*)
991 {
992 JITStubCall(this, cti_op_pop_scope).call();
993 }
994
995 void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
996 {
997 unsigned dst = currentInstruction[1].u.operand;
998 unsigned src1 = currentInstruction[2].u.operand;
999 unsigned src2 = currentInstruction[3].u.operand;
1000
1001 emitGetVirtualRegisters(src1, regT0, src2, regT1);
1002
1003 // Jump to a slow case if either operand is a number, or if both are JSCell*s.
1004 move(regT0, regT2);
1005 orPtr(regT1, regT2);
1006 addSlowCase(emitJumpIfJSCell(regT2));
1007 addSlowCase(emitJumpIfImmediateNumber(regT2));
1008
1009 if (type == OpStrictEq)
1010 compare32(Equal, regT1, regT0, regT0);
1011 else
1012 compare32(NotEqual, regT1, regT0, regT0);
1013 emitTagAsBoolImmediate(regT0);
1014
1015 emitPutVirtualRegister(dst);
1016 }
1017
1018 void JIT::emit_op_stricteq(Instruction* currentInstruction)
1019 {
1020 compileOpStrictEq(currentInstruction, OpStrictEq);
1021 }
1022
1023 void JIT::emit_op_nstricteq(Instruction* currentInstruction)
1024 {
1025 compileOpStrictEq(currentInstruction, OpNStrictEq);
1026 }
1027
1028 void JIT::emit_op_to_jsnumber(Instruction* currentInstruction)
1029 {
1030 int srcVReg = currentInstruction[2].u.operand;
1031 emitGetVirtualRegister(srcVReg, regT0);
1032
1033 Jump wasImmediate = emitJumpIfImmediateInteger(regT0);
1034
1035 emitJumpSlowCaseIfNotJSCell(regT0, srcVReg);
1036 loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
1037 addSlowCase(branch8(NotEqual, Address(regT2, Structure::typeInfoTypeOffset()), TrustedImm32(NumberType)));
1038
1039 wasImmediate.link(this);
1040
1041 emitPutVirtualRegister(currentInstruction[1].u.operand);
1042 }
1043
1044 void JIT::emit_op_push_new_scope(Instruction* currentInstruction)
1045 {
1046 JITStubCall stubCall(this, cti_op_push_new_scope);
1047 stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
1048 stubCall.addArgument(currentInstruction[3].u.operand, regT2);
1049 stubCall.call(currentInstruction[1].u.operand);
1050 }
1051
1052 void JIT::emit_op_catch(Instruction* currentInstruction)
1053 {
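    // On entry the call frame is passed in regT0. Restore it, then load the pending exception
    // from the global data, clear it, and store it into the destination register.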
1054 killLastResultRegister(); // FIXME: Implicitly treat op_catch as a labeled statement, and remove this line of code.
1055 move(regT0, callFrameRegister);
1056 peek(regT3, OBJECT_OFFSETOF(struct JITStackFrame, globalData) / sizeof(void*));
1057 loadPtr(Address(regT3, OBJECT_OFFSETOF(JSGlobalData, exception)), regT0);
1058 storePtr(TrustedImmPtr(JSValue::encode(JSValue())), Address(regT3, OBJECT_OFFSETOF(JSGlobalData, exception)));
1059 emitPutVirtualRegister(currentInstruction[1].u.operand);
1060 }
1061
1062 void JIT::emit_op_jmp_scopes(Instruction* currentInstruction)
1063 {
1064 JITStubCall stubCall(this, cti_op_jmp_scopes);
1065 stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
1066 stubCall.call();
1067 addJump(jump(), currentInstruction[2].u.operand);
1068 }
1069
1070 void JIT::emit_op_switch_imm(Instruction* currentInstruction)
1071 {
1072 unsigned tableIndex = currentInstruction[1].u.operand;
1073 unsigned defaultOffset = currentInstruction[2].u.operand;
1074 unsigned scrutinee = currentInstruction[3].u.operand;
1075
1076 // create jump table for switch destinations, track this switch statement.
1077 SimpleJumpTable* jumpTable = &m_codeBlock->immediateSwitchJumpTable(tableIndex);
1078 m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Immediate));
1079 jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());
1080
1081 JITStubCall stubCall(this, cti_op_switch_imm);
1082 stubCall.addArgument(scrutinee, regT2);
1083 stubCall.addArgument(Imm32(tableIndex));
1084 stubCall.call();
1085 jump(regT0);
1086 }
1087
1088 void JIT::emit_op_switch_char(Instruction* currentInstruction)
1089 {
1090 unsigned tableIndex = currentInstruction[1].u.operand;
1091 unsigned defaultOffset = currentInstruction[2].u.operand;
1092 unsigned scrutinee = currentInstruction[3].u.operand;
1093
1094 // create jump table for switch destinations, track this switch statement.
1095 SimpleJumpTable* jumpTable = &m_codeBlock->characterSwitchJumpTable(tableIndex);
1096 m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Character));
1097 jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());
1098
1099 JITStubCall stubCall(this, cti_op_switch_char);
1100 stubCall.addArgument(scrutinee, regT2);
1101 stubCall.addArgument(Imm32(tableIndex));
1102 stubCall.call();
1103 jump(regT0);
1104 }
1105
1106 void JIT::emit_op_switch_string(Instruction* currentInstruction)
1107 {
1108 unsigned tableIndex = currentInstruction[1].u.operand;
1109 unsigned defaultOffset = currentInstruction[2].u.operand;
1110 unsigned scrutinee = currentInstruction[3].u.operand;
1111
1112 // create jump table for switch destinations, track this switch statement.
1113 StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
1114 m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset));
1115
1116 JITStubCall stubCall(this, cti_op_switch_string);
1117 stubCall.addArgument(scrutinee, regT2);
1118 stubCall.addArgument(Imm32(tableIndex));
1119 stubCall.call();
1120 jump(regT0);
1121 }
1122
1123 void JIT::emit_op_throw_reference_error(Instruction* currentInstruction)
1124 {
1125 JITStubCall stubCall(this, cti_op_throw_reference_error);
1126 stubCall.addArgument(ImmPtr(JSValue::encode(m_codeBlock->getConstant(currentInstruction[1].u.operand))));
1127 stubCall.call();
1128 }
1129
1130 void JIT::emit_op_debug(Instruction* currentInstruction)
1131 {
1132 #if ENABLE(DEBUG_WITH_BREAKPOINT)
1133 UNUSED_PARAM(currentInstruction);
1134 breakpoint();
1135 #else
1136 JITStubCall stubCall(this, cti_op_debug);
1137 stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
1138 stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
1139 stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
1140 stubCall.call();
1141 #endif
1142 }
1143
1144 void JIT::emit_op_eq_null(Instruction* currentInstruction)
1145 {
1146 unsigned dst = currentInstruction[1].u.operand;
1147 unsigned src1 = currentInstruction[2].u.operand;
1148
1149 emitGetVirtualRegister(src1, regT0);
1150 Jump isImmediate = emitJumpIfNotJSCell(regT0);
1151
1152 loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
1153 test8(NonZero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined), regT0);
1154
1155 Jump wasNotImmediate = jump();
1156
1157 isImmediate.link(this);
1158
1159 andPtr(TrustedImm32(~TagBitUndefined), regT0);
1160 comparePtr(Equal, regT0, TrustedImm32(ValueNull), regT0);
1161
1162 wasNotImmediate.link(this);
1163
1164 emitTagAsBoolImmediate(regT0);
1165 emitPutVirtualRegister(dst);
1166
1167 }
1168
1169 void JIT::emit_op_neq_null(Instruction* currentInstruction)
1170 {
1171 unsigned dst = currentInstruction[1].u.operand;
1172 unsigned src1 = currentInstruction[2].u.operand;
1173
1174 emitGetVirtualRegister(src1, regT0);
1175 Jump isImmediate = emitJumpIfNotJSCell(regT0);
1176
1177 loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
1178 test8(Zero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined), regT0);
1179
1180 Jump wasNotImmediate = jump();
1181
1182 isImmediate.link(this);
1183
1184 andPtr(TrustedImm32(~TagBitUndefined), regT0);
1185 comparePtr(NotEqual, regT0, TrustedImm32(ValueNull), regT0);
1186
1187 wasNotImmediate.link(this);
1188
1189 emitTagAsBoolImmediate(regT0);
1190 emitPutVirtualRegister(dst);
1191 }
1192
1193 void JIT::emit_op_enter(Instruction*)
1194 {
1195 // Even though CTI doesn't use them, we initialize our constant
1196 // registers to zap stale pointers, to avoid unnecessarily prolonging
1197 // object lifetime and increasing GC pressure.
1198 size_t count = m_codeBlock->m_numVars;
1199 for (size_t j = 0; j < count; ++j)
1200 emitInitRegister(j);
1201
1202 }
1203
1204 void JIT::emit_op_create_activation(Instruction* currentInstruction)
1205 {
1206 unsigned dst = currentInstruction[1].u.operand;
1207
1208 Jump activationCreated = branchTestPtr(NonZero, Address(callFrameRegister, sizeof(Register) * dst));
1209 JITStubCall(this, cti_op_push_activation).call(currentInstruction[1].u.operand);
1210 emitPutVirtualRegister(dst);
1211 activationCreated.link(this);
1212 }
1213
1214 void JIT::emit_op_create_arguments(Instruction* currentInstruction)
1215 {
1216 unsigned dst = currentInstruction[1].u.operand;
1217
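    // Lazily create the arguments object: if the register is still empty, call the appropriate
    // stub and store the result in both the named and the unmodified arguments registers.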
1218 Jump argsCreated = branchTestPtr(NonZero, Address(callFrameRegister, sizeof(Register) * dst));
1219 if (m_codeBlock->m_numParameters == 1)
1220 JITStubCall(this, cti_op_create_arguments_no_params).call();
1221 else
1222 JITStubCall(this, cti_op_create_arguments).call();
1223 emitPutVirtualRegister(dst);
1224 emitPutVirtualRegister(unmodifiedArgumentsRegister(dst));
1225 argsCreated.link(this);
1226 }
1227
1228 void JIT::emit_op_init_lazy_reg(Instruction* currentInstruction)
1229 {
1230 unsigned dst = currentInstruction[1].u.operand;
1231
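    // Lazily-initialized registers start out as zero (the empty value), so later ops can test
    // for creation with a simple non-zero check.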
1232 storePtr(TrustedImmPtr(0), Address(callFrameRegister, sizeof(Register) * dst));
1233 }
1234
1235 void JIT::emit_op_convert_this(Instruction* currentInstruction)
1236 {
1237 emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
1238
1239 emitJumpSlowCaseIfNotJSCell(regT0);
1240 loadPtr(Address(regT0, JSCell::structureOffset()), regT1);
1241 addSlowCase(branchTest8(NonZero, Address(regT1, Structure::typeInfoFlagsOffset()), TrustedImm32(NeedsThisConversion)));
1242 }
1243
1244 void JIT::emit_op_convert_this_strict(Instruction* currentInstruction)
1245 {
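    // In strict mode 'this' is not coerced to an object: an empty value becomes null, cells
    // flagged NeedsThisConversion take the slow path, and everything else passes through unchanged.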
1246 emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
1247 Jump notNull = branchTestPtr(NonZero, regT0);
1248 move(TrustedImmPtr(JSValue::encode(jsNull())), regT0);
1249 emitPutVirtualRegister(currentInstruction[1].u.operand, regT0);
1250 Jump setThis = jump();
1251 notNull.link(this);
1252 Jump isImmediate = emitJumpIfNotJSCell(regT0);
1253 loadPtr(Address(regT0, JSCell::structureOffset()), regT1);
1254 Jump notAnObject = branch8(NotEqual, Address(regT1, Structure::typeInfoTypeOffset()), TrustedImm32(ObjectType));
1255 addSlowCase(branchTest8(NonZero, Address(regT1, Structure::typeInfoFlagsOffset()), TrustedImm32(NeedsThisConversion)));
1256 isImmediate.link(this);
1257 notAnObject.link(this);
1258 setThis.link(this);
1259 }
1260
1261 void JIT::emit_op_get_callee(Instruction* currentInstruction)
1262 {
1263 unsigned result = currentInstruction[1].u.operand;
1264 emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, regT0);
1265 emitPutVirtualRegister(result);
1266 }
1267
1268 void JIT::emit_op_create_this(Instruction* currentInstruction)
1269 {
1270 JITStubCall stubCall(this, cti_op_create_this);
1271 stubCall.addArgument(currentInstruction[2].u.operand, regT1);
1272 stubCall.call(currentInstruction[1].u.operand);
1273 }
1274
1275 void JIT::emit_op_profile_will_call(Instruction* currentInstruction)
1276 {
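    // Skip the stub call entirely when no profiler is enabled.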
1277 peek(regT1, OBJECT_OFFSETOF(JITStackFrame, enabledProfilerReference) / sizeof(void*));
1278 Jump noProfiler = branchTestPtr(Zero, Address(regT1));
1279
1280 JITStubCall stubCall(this, cti_op_profile_will_call);
1281 stubCall.addArgument(currentInstruction[1].u.operand, regT1);
1282 stubCall.call();
1283 noProfiler.link(this);
1284
1285 }
1286
1287 void JIT::emit_op_profile_did_call(Instruction* currentInstruction)
1288 {
1289 peek(regT1, OBJECT_OFFSETOF(JITStackFrame, enabledProfilerReference) / sizeof(void*));
1290 Jump noProfiler = branchTestPtr(Zero, Address(regT1));
1291
1292 JITStubCall stubCall(this, cti_op_profile_did_call);
1293 stubCall.addArgument(currentInstruction[1].u.operand, regT1);
1294 stubCall.call();
1295 noProfiler.link(this);
1296 }
1297
1298
1299 // Slow cases
1300
void JIT::emitSlow_op_convert_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_convert_this);
    stubCall.addArgument(regT0);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_convert_this_strict(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_convert_this_strict);
    stubCall.addArgument(regT0);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_to_primitive(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_to_primitive);
    stubCall.addArgument(regT0);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_loop_if_lesseq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned op2 = currentInstruction[2].u.operand;
    unsigned target = currentInstruction[3].u.operand;
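    // The number of linkSlowCase() calls must match the number of slow cases
    // registered on the fast path: one when op2 is a constant immediate int,
    // two otherwise.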
    if (isOperandConstantImmediateInt(op2)) {
        linkSlowCase(iter);
        JITStubCall stubCall(this, cti_op_loop_if_lesseq);
        stubCall.addArgument(regT0);
        stubCall.addArgument(currentInstruction[2].u.operand, regT2);
        stubCall.call();
        emitJumpSlowToHot(branchTest32(NonZero, regT0), target);
    } else {
        linkSlowCase(iter);
        linkSlowCase(iter);
        JITStubCall stubCall(this, cti_op_loop_if_lesseq);
        stubCall.addArgument(regT0);
        stubCall.addArgument(regT1);
        stubCall.call();
        emitJumpSlowToHot(branchTest32(NonZero, regT0), target);
    }
}

void JIT::emitSlow_op_put_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned base = currentInstruction[1].u.operand;
    unsigned property = currentInstruction[2].u.operand;
    unsigned value = currentInstruction[3].u.operand;

    linkSlowCase(iter); // property int32 check
    linkSlowCaseIfNotJSCell(iter, base); // base cell check
    linkSlowCase(iter); // base not array check
    linkSlowCase(iter); // in vector check

    JITStubCall stubPutByValCall(this, cti_op_put_by_val);
    stubPutByValCall.addArgument(regT0);
    stubPutByValCall.addArgument(property, regT2);
    stubPutByValCall.addArgument(value, regT2);
    stubPutByValCall.call();
}

void JIT::emitSlow_op_not(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
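    // Undo the ValueFalse XOR applied by the fast path so the stub receives the
    // original operand value.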
    xorPtr(TrustedImm32(static_cast<int32_t>(ValueFalse)), regT0);
    JITStubCall stubCall(this, cti_op_not);
    stubCall.addArgument(regT0);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_jfalse(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(regT0);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(Zero, regT0), currentInstruction[2].u.operand); // inverted!
}

void JIT::emitSlow_op_bitnot(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_bitnot);
    stubCall.addArgument(regT0);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_jtrue(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(regT0);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(NonZero, regT0), currentInstruction[2].u.operand);
}

void JIT::emitSlow_op_bitxor(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_bitxor);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_bitor(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_bitor);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_eq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_eq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call();
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_neq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_eq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call();
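    // op_neq reuses the eq stub and flips the low bit of its boolean result
    // before tagging it as a bool immediate.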
    xor32(TrustedImm32(0x1), regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_stricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_stricteq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_nstricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_nstricteq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_check_has_instance(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned baseVal = currentInstruction[1].u.operand;

    linkSlowCaseIfNotJSCell(iter, baseVal);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_check_has_instance);
    stubCall.addArgument(baseVal, regT2);
    stubCall.call();
}

void JIT::emitSlow_op_instanceof(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;
    unsigned baseVal = currentInstruction[3].u.operand;
    unsigned proto = currentInstruction[4].u.operand;

    linkSlowCaseIfNotJSCell(iter, value);
    linkSlowCaseIfNotJSCell(iter, proto);
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_instanceof);
    stubCall.addArgument(value, regT2);
    stubCall.addArgument(baseVal, regT2);
    stubCall.addArgument(proto, regT2);
    stubCall.call(dst);
}

void JIT::emitSlow_op_call(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(currentInstruction, iter, m_callLinkInfoIndex++, op_call);
}

void JIT::emitSlow_op_call_eval(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(currentInstruction, iter, m_callLinkInfoIndex++, op_call_eval);
}

void JIT::emitSlow_op_call_varargs(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallVarargsSlowCase(currentInstruction, iter);
}

void JIT::emitSlow_op_construct(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(currentInstruction, iter, m_callLinkInfoIndex++, op_construct);
}

void JIT::emitSlow_op_to_jsnumber(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCaseIfNotJSCell(iter, currentInstruction[2].u.operand);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_to_jsnumber);
    stubCall.addArgument(regT0);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_get_arguments_length(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int argumentsRegister = currentInstruction[2].u.operand;
    addSlowCase(branchTestPtr(NonZero, addressFor(argumentsRegister)));
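    // ArgumentCount includes the 'this' slot, so subtract one before re-tagging
    // the length as an int32 immediate.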
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT0);
    sub32(TrustedImm32(1), regT0);
    emitFastArithReTagImmediate(regT0, regT0);
    emitPutVirtualRegister(dst, regT0);
}

void JIT::emitSlow_op_get_arguments_length(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    unsigned dst = currentInstruction[1].u.operand;
    unsigned base = currentInstruction[2].u.operand;
    Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand));

    emitGetVirtualRegister(base, regT0);
    JITStubCall stubCall(this, cti_op_get_by_id_generic);
    stubCall.addArgument(regT0);
    stubCall.addArgument(TrustedImmPtr(ident));
    stubCall.call(dst);
}

void JIT::emit_op_get_argument_by_val(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int argumentsRegister = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    addSlowCase(branchTestPtr(NonZero, addressFor(argumentsRegister)));
    emitGetVirtualRegister(property, regT1);
    addSlowCase(emitJumpIfNotImmediateInteger(regT1));
    add32(TrustedImm32(1), regT1);
    // regT1 now contains the integer index of the argument we want, including this
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT2);
    addSlowCase(branch32(AboveOrEqual, regT1, regT2));

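    // Arguments within the declared parameter count live in place just below the
    // call frame header; any extra arguments were copied out of line, so those
    // are addressed relative to the full ArgumentCount instead.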
    Jump skipOutofLineParams;
    int numArgs = m_codeBlock->m_numParameters;
    if (numArgs) {
        Jump notInInPlaceArgs = branch32(AboveOrEqual, regT1, Imm32(numArgs));
        addPtr(Imm32(static_cast<unsigned>(-(RegisterFile::CallFrameHeaderSize + numArgs) * sizeof(Register))), callFrameRegister, regT0);
        loadPtr(BaseIndex(regT0, regT1, TimesEight, 0), regT0);
        skipOutofLineParams = jump();
        notInInPlaceArgs.link(this);
    }

    addPtr(Imm32(static_cast<unsigned>(-(RegisterFile::CallFrameHeaderSize + numArgs) * sizeof(Register))), callFrameRegister, regT0);
    mul32(TrustedImm32(sizeof(Register)), regT2, regT2);
    subPtr(regT2, regT0);
    loadPtr(BaseIndex(regT0, regT1, TimesEight, 0), regT0);
    if (numArgs)
        skipOutofLineParams.link(this);
    emitPutVirtualRegister(dst, regT0);
}

void JIT::emitSlow_op_get_argument_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned arguments = currentInstruction[2].u.operand;
    unsigned property = currentInstruction[3].u.operand;

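    // The first slow case fires when the arguments object has already been
    // materialized; it skips creation and goes straight to the generic
    // get_by_val stub. The other two (non-integer index, index out of range)
    // create the arguments object first.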
    linkSlowCase(iter);
    Jump skipArgumentsCreation = jump();

    linkSlowCase(iter);
    linkSlowCase(iter);
    if (m_codeBlock->m_numParameters == 1)
        JITStubCall(this, cti_op_create_arguments_no_params).call();
    else
        JITStubCall(this, cti_op_create_arguments).call();
    emitPutVirtualRegister(arguments);
    emitPutVirtualRegister(unmodifiedArgumentsRegister(arguments));

    skipArgumentsCreation.link(this);
    JITStubCall stubCall(this, cti_op_get_by_val);
    stubCall.addArgument(arguments, regT2);
    stubCall.addArgument(property, regT2);
    stubCall.call(dst);
}

#endif // USE(JSVALUE64)

void JIT::emit_op_resolve_global_dynamic(Instruction* currentInstruction)
{
    int skip = currentInstruction[5].u.operand;

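    // Walk 'skip' nodes up the scope chain. Each skipped node must still have
    // the pristine activation Structure; if its Structure has changed, a
    // dynamically added property could shadow the global, so bail to the slow
    // case. The top-level activation is only checked if it has been created.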
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT0);

    bool checkTopLevel = m_codeBlock->codeType() == FunctionCode && m_codeBlock->needsFullScopeChain();
    ASSERT(skip || !checkTopLevel);
    if (checkTopLevel && skip--) {
        Jump activationNotCreated;
        if (checkTopLevel)
            activationNotCreated = branchTestPtr(Zero, addressFor(m_codeBlock->activationRegister()));
        loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, object)), regT1);
        addSlowCase(checkStructure(regT1, m_globalData->activationStructure.get()));
        loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, next)), regT0);
        activationNotCreated.link(this);
    }
    while (skip--) {
        loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, object)), regT1);
        addSlowCase(checkStructure(regT1, m_globalData->activationStructure.get()));
        loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, next)), regT0);
    }
    emit_op_resolve_global(currentInstruction, true);
}

void JIT::emitSlow_op_resolve_global_dynamic(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    Identifier* ident = &m_codeBlock->identifier(currentInstruction[2].u.operand);
    int skip = currentInstruction[5].u.operand;
    while (skip--)
        linkSlowCase(iter);
    JITStubCall resolveStubCall(this, cti_op_resolve);
    resolveStubCall.addArgument(TrustedImmPtr(ident));
    resolveStubCall.call(dst);
    emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_resolve_global_dynamic));

    unsigned currentIndex = m_globalResolveInfoIndex++;

    linkSlowCase(iter); // We managed to skip all the nodes in the scope chain, but the cache missed.
    JITStubCall stubCall(this, cti_op_resolve_global);
    stubCall.addArgument(TrustedImmPtr(ident));
    stubCall.addArgument(Imm32(currentIndex));
    stubCall.addArgument(regT0);
    stubCall.call(dst);
}

void JIT::emit_op_new_regexp(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_regexp);
    stubCall.addArgument(TrustedImmPtr(m_codeBlock->regexp(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_load_varargs(Instruction* currentInstruction)
{
    int argCountDst = currentInstruction[1].u.operand;
    int argsOffset = currentInstruction[2].u.operand;
    int registerOffset = currentInstruction[3].u.operand;
    ASSERT(argsOffset <= registerOffset);

    int expectedParams = m_codeBlock->m_numParameters - 1;
    // Don't do inline copying if we aren't guaranteed to have a single stream
    // of arguments
    if (expectedParams) {
        JITStubCall stubCall(this, cti_op_load_varargs);
        stubCall.addArgument(Imm32(argsOffset));
        stubCall.call();
        // Stores a naked int32 in the register file.
        store32(returnValueRegister, Address(callFrameRegister, argCountDst * sizeof(Register)));
        return;
    }

#if USE(JSVALUE32_64)
    addSlowCase(branch32(NotEqual, tagFor(argsOffset), TrustedImm32(JSValue::EmptyValueTag)));
#else
    addSlowCase(branchTestPtr(NonZero, addressFor(argsOffset)));
#endif
    // Load arg count into regT0
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT0);
    store32(TrustedImm32(Int32Tag), intTagFor(argCountDst));
    store32(regT0, intPayloadFor(argCountDst));
    Jump endBranch = branch32(Equal, regT0, TrustedImm32(1));

    mul32(TrustedImm32(sizeof(Register)), regT0, regT3);
    addPtr(TrustedImm32(static_cast<unsigned>(sizeof(Register) - RegisterFile::CallFrameHeaderSize * sizeof(Register))), callFrameRegister, regT1);
    subPtr(regT3, regT1); // regT1 is now the start of the out of line arguments
    addPtr(Imm32(argsOffset * sizeof(Register)), callFrameRegister, regT2); // regT2 is the target buffer

    // Bounds check the registerfile
    addPtr(regT2, regT3);
    addPtr(Imm32((registerOffset - argsOffset) * sizeof(Register)), regT3);
    addSlowCase(branchPtr(Below, AbsoluteAddress(m_globalData->interpreter->registerFile().addressOfEnd()), regT3));

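    // Copy argCount - 1 Registers from the out-of-line argument area into the
    // target buffer, iterating the index from argCount - 1 down to 1. On 32-bit
    // both words of each Register are moved.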
    sub32(TrustedImm32(1), regT0);
    Label loopStart = label();
    loadPtr(BaseIndex(regT1, regT0, TimesEight, static_cast<unsigned>(0 - 2 * sizeof(Register))), regT3);
    storePtr(regT3, BaseIndex(regT2, regT0, TimesEight, static_cast<unsigned>(0 - sizeof(Register))));
#if USE(JSVALUE32_64)
    loadPtr(BaseIndex(regT1, regT0, TimesEight, static_cast<unsigned>(sizeof(void*) - 2 * sizeof(Register))), regT3);
    storePtr(regT3, BaseIndex(regT2, regT0, TimesEight, static_cast<unsigned>(sizeof(void*) - sizeof(Register))));
#endif
    branchSubPtr(NonZero, TrustedImm32(1), regT0).linkTo(loopStart, this);
    endBranch.link(this);
}

void JIT::emitSlow_op_load_varargs(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int argCountDst = currentInstruction[1].u.operand;
    int argsOffset = currentInstruction[2].u.operand;
    int expectedParams = m_codeBlock->m_numParameters - 1;
    if (expectedParams)
        return;

    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_load_varargs);
    stubCall.addArgument(Imm32(argsOffset));
    stubCall.call();

    store32(TrustedImm32(Int32Tag), intTagFor(argCountDst));
    store32(returnValueRegister, intPayloadFor(argCountDst));
}

void JIT::emit_op_new_func(Instruction* currentInstruction)
{
    Jump lazyJump;
    int dst = currentInstruction[1].u.operand;
    if (currentInstruction[3].u.operand) {
#if USE(JSVALUE32_64)
        lazyJump = branch32(NotEqual, tagFor(dst), TrustedImm32(JSValue::EmptyValueTag));
#else
        lazyJump = branchTestPtr(NonZero, addressFor(dst));
#endif
    }
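    // When operand 3 is set, lazyJump (armed above) skips the stub call if the
    // destination register already holds a lazily created function.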
    JITStubCall stubCall(this, cti_op_new_func);
    stubCall.addArgument(TrustedImmPtr(m_codeBlock->functionDecl(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
    if (currentInstruction[3].u.operand)
        lazyJump.link(this);
}

} // namespace JSC

#endif // ENABLE(JIT)