2 * Copyright (C) 2009 Apple Inc. All rights reserved.
3 * Copyright (C) 2010 Patrick Gansterer <paroga@paroga.com>
5 * Redistribution and use in source and binary forms, with or without
6 * modification, are permitted provided that the following conditions
8 * 1. Redistributions of source code must retain the above copyright
9 * notice, this list of conditions and the following disclaimer.
10 * 2. Redistributions in binary form must reproduce the above copyright
11 * notice, this list of conditions and the following disclaimer in the
12 * documentation and/or other materials provided with the distribution.
14 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
15 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
16 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
17 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
18 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
19 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
20 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
21 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
22 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
23 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
24 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
31 #include "Arguments.h"
33 #include "JITInlineMethods.h"
34 #include "JITStubCall.h"
37 #include "JSFunction.h"
38 #include "JSPropertyNameIterator.h"
39 #include "LinkBuffer.h"
// Builds the shared CTI machine trampolines: the string-length fast path, the
// virtual call/construct link-and-dispatch stubs, and the native call thunks;
// links them into one executable block and publishes the entry points via
// `trampolines`.
// NOTE(review): the embedded original line numbers are non-contiguous — braces,
// preprocessor guards, and some statements were lost in extraction; the inline
// notes below flag suspected omissions. Verify against the pristine file.
45 PassRefPtr<ExecutableMemoryHandle> JIT::privateCompileCTIMachineTrampolines(JSGlobalData* globalData, TrampolineStructure *trampolines)
47 // (2) The second function provides fast property access for string length
48 Label stringLengthBegin = align();
50 // Check eax is a string
51 Jump string_failureCases1 = emitJumpIfNotJSCell(regT0);
52 Jump string_failureCases2 = branchPtr(NotEqual, Address(regT0), TrustedImmPtr(m_globalData->jsStringVPtr));
54 // Checks out okay! - get the length from the Ustring.
55 load32(Address(regT0, OBJECT_OFFSETOF(JSString, m_length)), regT0);
57 Jump string_failureCases3 = branch32(LessThan, regT0, TrustedImm32(0));
59 // regT0 contains a 64 bit value (is positive, is zero extended) so we don't need sign extend here.
60 emitFastArithIntToImmNoCheck(regT0, regT0);
// NOTE(review): the `ret();` terminating the string-length stub appears to be
// missing here (numbering jumps 60 -> 64) — confirm against the pristine file.
64 // (3) Trampolines for the slow cases of op_call / op_call_eval / op_construct.
65 COMPILE_ASSERT(sizeof(CodeType) == 4, CodeTypeEnumMustBe32Bit);
67 // VirtualCallLink Trampoline
68 // regT0 holds callee, regT1 holds argCount. regT2 will hold the FunctionExecutable.
69 JumpList callLinkFailures;
70 Label virtualCallLinkBegin = align();
71 compileOpCallInitializeCallFrame();
72 preserveReturnAddressAfterCall(regT3);
73 emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);
74 restoreArgumentReference();
75 Call callLazyLinkCall = call();
76 callLinkFailures.append(branchTestPtr(Zero, regT0));
77 restoreReturnAddressBeforeReturn(regT3);
78 emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT1);
// NOTE(review): the dispatch `jump(regT0);` into the lazily linked code appears
// to be missing here (numbering jumps 78 -> 81).
81 // VirtualConstructLink Trampoline
82 // regT0 holds callee, regT1 holds argCount. regT2 will hold the FunctionExecutable.
83 Label virtualConstructLinkBegin = align();
84 compileOpCallInitializeCallFrame();
85 preserveReturnAddressAfterCall(regT3);
86 emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);
87 restoreArgumentReference();
88 Call callLazyLinkConstruct = call();
89 callLinkFailures.append(branchTestPtr(Zero, regT0));
90 restoreReturnAddressBeforeReturn(regT3);
91 emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT1);
// NOTE(review): a `jump(regT0);` appears to be missing here as well
// (numbering jumps 91 -> 94).
94 // VirtualCall Trampoline
95 // regT0 holds callee, regT1 holds argCount. regT2 will hold the FunctionExecutable.
96 Label virtualCallBegin = align();
97 compileOpCallInitializeCallFrame();
99 loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
// A non-negative m_numParametersForCall means the executable is already compiled.
101 Jump hasCodeBlock3 = branch32(GreaterThanOrEqual, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParametersForCall)), TrustedImm32(0));
102 preserveReturnAddressAfterCall(regT3);
103 restoreArgumentReference();
104 Call callCompileCall = call();
105 callLinkFailures.append(branchTestPtr(Zero, regT0));
106 emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT1);
107 restoreReturnAddressBeforeReturn(regT3);
108 loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
109 hasCodeBlock3.link(this);
111 loadPtr(Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_jitCodeForCallWithArityCheck)), regT0);
// NOTE(review): the `jump(regT0);` into the arity-check entry point appears to
// be missing here (numbering jumps 111 -> 114).
114 // VirtualConstruct Trampoline
115 // regT0 holds callee, regT1 holds argCount. regT2 will hold the FunctionExecutable.
116 Label virtualConstructBegin = align();
117 compileOpCallInitializeCallFrame();
119 loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
121 Jump hasCodeBlock4 = branch32(GreaterThanOrEqual, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParametersForConstruct)), TrustedImm32(0));
122 preserveReturnAddressAfterCall(regT3);
123 restoreArgumentReference();
124 Call callCompileConstruct = call();
125 callLinkFailures.append(branchTestPtr(Zero, regT0));
126 emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT1);
127 restoreReturnAddressBeforeReturn(regT3);
128 loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
129 hasCodeBlock4.link(this);
131 loadPtr(Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_jitCodeForConstructWithArityCheck)), regT0);
// NOTE(review): a `jump(regT0);` appears to be missing here too (131 -> 134).
134 // If the parser fails we want to be able to be able to keep going,
135 // So we handle this as a parse failure.
136 callLinkFailures.link(this);
// Unwind to the caller frame and re-route the return into the VM throw path.
137 emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);
138 emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);
139 restoreReturnAddressBeforeReturn(regT1);
140 move(TrustedImmPtr(&globalData->exceptionLocation), regT2);
141 storePtr(regT1, regT2);
142 poke(callFrameRegister, 1 + OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof(void*));
143 storePtr(callFrameRegister, &m_globalData->topCallFrame);
144 poke(TrustedImmPtr(FunctionPtr(ctiVMThrowTrampoline).value()));
// NOTE(review): the `ret();` that transfers control to ctiVMThrowTrampoline
// appears to be missing here (numbering jumps 144 -> 147).
147 // NativeCall Trampoline
148 Label nativeCallThunk = privateCompileCTINativeCall(globalData);
149 Label nativeConstructThunk = privateCompileCTINativeCall(globalData, true);
151 Call string_failureCases1Call = makeTailRecursiveCall(string_failureCases1);
152 Call string_failureCases2Call = makeTailRecursiveCall(string_failureCases2);
153 Call string_failureCases3Call = makeTailRecursiveCall(string_failureCases3);
155 // All trampolines constructed! copy the code, link up calls, and set the pointers on the Machine object.
156 LinkBuffer patchBuffer(*m_globalData, this);
158 patchBuffer.link(string_failureCases1Call, FunctionPtr(cti_op_get_by_id_string_fail));
159 patchBuffer.link(string_failureCases2Call, FunctionPtr(cti_op_get_by_id_string_fail));
160 patchBuffer.link(string_failureCases3Call, FunctionPtr(cti_op_get_by_id_string_fail));
161 patchBuffer.link(callLazyLinkCall, FunctionPtr(cti_vm_lazyLinkCall));
162 patchBuffer.link(callLazyLinkConstruct, FunctionPtr(cti_vm_lazyLinkConstruct));
163 patchBuffer.link(callCompileCall, FunctionPtr(cti_op_call_jitCompile));
164 patchBuffer.link(callCompileConstruct, FunctionPtr(cti_op_construct_jitCompile));
166 CodeRef finalCode = patchBuffer.finalizeCode();
167 RefPtr<ExecutableMemoryHandle> executableMemory = finalCode.executableMemory();
// Publish the finalized entry points so the VM can dispatch into them.
169 trampolines->ctiVirtualCallLink = patchBuffer.trampolineAt(virtualCallLinkBegin);
170 trampolines->ctiVirtualConstructLink = patchBuffer.trampolineAt(virtualConstructLinkBegin);
171 trampolines->ctiVirtualCall = patchBuffer.trampolineAt(virtualCallBegin);
172 trampolines->ctiVirtualConstruct = patchBuffer.trampolineAt(virtualConstructBegin);
173 trampolines->ctiNativeCall = patchBuffer.trampolineAt(nativeCallThunk);
174 trampolines->ctiNativeConstruct = patchBuffer.trampolineAt(nativeConstructThunk);
175 trampolines->ctiStringLengthTrampoline = patchBuffer.trampolineAt(stringLengthBegin);
177 return executableMemory.release();
// Emits the thunk that bridges from JIT code into a host (native) C function,
// selecting the constructor or call entry of the NativeExecutable. Contains
// one section per CPU back end.
// NOTE(review): the `#if CPU(X86_64)` / `#elif CPU(ARM...)` / `#elif CPU(MIPS)`
// / `#else` / `#endif` guards around the platform sections appear to have been
// lost in extraction (numbering gaps at each section boundary) — as written the
// sections would all be compiled; verify against the pristine file.
180 JIT::Label JIT::privateCompileCTINativeCall(JSGlobalData* globalData, bool isConstruct)
// Offset of the host-function pointer inside NativeExecutable, picked by kind.
182 int executableOffsetToFunction = isConstruct ? OBJECT_OFFSETOF(NativeExecutable, m_constructor) : OBJECT_OFFSETOF(NativeExecutable, m_function);
184 Label nativeCallThunk = align();
// Native frames have no CodeBlock.
186 emitPutImmediateToCallFrameHeader(0, RegisterFile::CodeBlock);
// --- X86_64 section (guard presumably lost) ---
189 // Load caller frame's scope chain into this callframe so that whatever we call can
190 // get to its global data.
191 emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT0);
192 emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT0);
193 emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);
// NOTE(review): nothing visible loads regT1 before it is stored as the
// ReturnPC — a `peek(regT1);` appears to be missing here (193 -> 196).
196 emitPutToCallFrameHeader(regT1, RegisterFile::ReturnPC);
198 // Calling convention: f(edi, esi, edx, ecx, ...);
199 // Host function signature: f(ExecState*);
200 move(callFrameRegister, X86Registers::edi);
202 subPtr(TrustedImm32(16 - sizeof(void*)), stackPointerRegister); // Align stack after call.
204 emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, X86Registers::esi);
205 loadPtr(Address(X86Registers::esi, OBJECT_OFFSETOF(JSFunction, m_executable)), X86Registers::r9);
206 move(regT0, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
207 call(Address(X86Registers::r9, executableOffsetToFunction));
209 addPtr(TrustedImm32(16 - sizeof(void*)), stackPointerRegister);
// --- ARM section (guard presumably lost) ---
212 // Load caller frame's scope chain into this callframe so that whatever we call can
213 // get to its global data.
214 emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT2);
215 emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT2);
216 emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);
218 preserveReturnAddressAfterCall(regT3); // Callee preserved
219 emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);
221 // Calling convention: f(r0 == regT0, r1 == regT1, ...);
222 // Host function signature: f(ExecState*);
223 move(callFrameRegister, ARMRegisters::r0);
225 emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, ARMRegisters::r1);
226 move(regT2, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
227 loadPtr(Address(ARMRegisters::r1, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
228 call(Address(regT2, executableOffsetToFunction));
230 restoreReturnAddressBeforeReturn(regT3);
// --- MIPS section (guard presumably lost) ---
233 // Load caller frame's scope chain into this callframe so that whatever we call can
234 // get to its global data.
235 emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT0);
236 emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT0);
237 emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);
239 preserveReturnAddressAfterCall(regT3); // Callee preserved
240 emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);
242 // Calling convention: f(a0, a1, a2, a3);
243 // Host function signature: f(ExecState*);
245 // Allocate stack space for 16 bytes (8-byte aligned)
246 // 16 bytes (unused) for 4 arguments
247 subPtr(TrustedImm32(16), stackPointerRegister);
250 move(callFrameRegister, MIPSRegisters::a0);
253 emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, MIPSRegisters::a2);
254 loadPtr(Address(MIPSRegisters::a2, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
255 move(regT0, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
256 call(Address(regT2, executableOffsetToFunction));
258 // Restore stack space
259 addPtr(TrustedImm32(16), stackPointerRegister);
261 restoreReturnAddressBeforeReturn(regT3);
// --- fallback section (#else branch; guard presumably lost) ---
264 #error "JIT not supported on this platform."
265 UNUSED_PARAM(executableOffsetToFunction);
269 // Check for an exception
270 loadPtr(&(globalData->exception), regT2);
271 Jump exceptionHandler = branchTestPtr(NonZero, regT2);
// NOTE(review): the normal-path `ret();` between the exception check and the
// handler appears to be missing (numbering jumps 271 -> 276).
276 // Handle an exception
277 exceptionHandler.link(this);
279 // Grab the return address.
280 preserveReturnAddressAfterCall(regT1);
282 move(TrustedImmPtr(&globalData->exceptionLocation), regT2);
283 storePtr(regT1, regT2);
284 poke(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof(void*));
286 storePtr(callFrameRegister, &m_globalData->topCallFrame);
287 // Set the return address.
288 move(TrustedImmPtr(FunctionPtr(ctiVMThrowTrampoline).value()), regT1);
289 restoreReturnAddressBeforeReturn(regT1);
// NOTE(review): the `ret();` that enters ctiVMThrowTrampoline appears to be
// missing here (numbering jumps 289 -> 293).
293 return nativeCallThunk;
296 JIT::CodeRef JIT::privateCompileCTINativeCall(JSGlobalData* globalData, NativeFunction)
298 return CodeRef::createSelfManagedCodeRef(globalData->jitStubs->ctiNativeCall());
301 void JIT::emit_op_mov(Instruction* currentInstruction)
303 int dst = currentInstruction[1].u.operand;
304 int src = currentInstruction[2].u.operand;
306 if (m_codeBlock->isConstantRegisterIndex(src)) {
307 storePtr(ImmPtr(JSValue::encode(getConstantOperand(src))), Address(callFrameRegister, dst * sizeof(Register)));
308 if (dst == m_lastResultBytecodeRegister)
309 killLastResultRegister();
310 } else if ((src == m_lastResultBytecodeRegister) || (dst == m_lastResultBytecodeRegister)) {
311 // If either the src or dst is the cached register go though
312 // get/put registers to make sure we track this correctly.
313 emitGetVirtualRegister(src, regT0);
314 emitPutVirtualRegister(dst);
316 // Perform the copy via regT1; do not disturb any mapping in regT0.
317 loadPtr(Address(callFrameRegister, src * sizeof(Register)), regT1);
318 storePtr(regT1, Address(callFrameRegister, dst * sizeof(Register)));
// op_end: load the program's result into the return-value register and return
// to the caller using the ReturnPC stored in the call frame header.
322 void JIT::emit_op_end(Instruction* currentInstruction)
324 ASSERT(returnValueRegister != callFrameRegister);
325 emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueRegister);
326 restoreReturnAddressBeforeReturn(Address(callFrameRegister, RegisterFile::ReturnPC * static_cast<int>(sizeof(Register))));
// NOTE(review): a terminating `ret();` appears to have been lost in extraction
// (embedded numbering jumps 326 -> 330) — confirm against the pristine file.
330 void JIT::emit_op_jmp(Instruction* currentInstruction)
332 unsigned target = currentInstruction[1].u.operand;
333 addJump(jump(), target);
// op_new_object: inline-allocate an empty JSFinalObject with the global
// object's empty-object Structure and store it into the dst register.
336 void JIT::emit_op_new_object(Instruction* currentInstruction)
338 emitAllocateJSFinalObject(ImmPtr(m_codeBlock->globalObject()->emptyObjectStructure()), regT0, regT1);
// NOTE(review): numbering jumps 338 -> 340 here; possibly only a blank line,
// but verify no statement (e.g. a slow-case registration) was lost.
340 emitPutVirtualRegister(currentInstruction[1].u.operand);
// Slow path for op_new_object: allocate via the stub when the inline
// allocator bailed out.
343 void JIT::emitSlow_op_new_object(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
// NOTE(review): `linkSlowCase(iter);` appears to have been lost here
// (numbering jumps 343 -> 346) — without it the slow-case jump is never bound.
346 JITStubCall(this, cti_op_new_object).call(currentInstruction[1].u.operand);
349 void JIT::emit_op_check_has_instance(Instruction* currentInstruction)
351 unsigned baseVal = currentInstruction[1].u.operand;
353 emitGetVirtualRegister(baseVal, regT0);
355 // Check that baseVal is a cell.
356 emitJumpSlowCaseIfNotJSCell(regT0, baseVal);
358 // Check that baseVal 'ImplementsHasInstance'.
359 loadPtr(Address(regT0, JSCell::structureOffset()), regT0);
360 addSlowCase(branchTest8(Zero, Address(regT0, Structure::typeInfoFlagsOffset()), TrustedImm32(ImplementsHasInstance)));
// op_instanceof: walk value's prototype chain looking for proto; dst gets a
// boolean. Non-cell operands and overridden hasInstance go to the slow case.
363 void JIT::emit_op_instanceof(Instruction* currentInstruction)
365 unsigned dst = currentInstruction[1].u.operand;
366 unsigned value = currentInstruction[2].u.operand;
367 unsigned baseVal = currentInstruction[3].u.operand;
368 unsigned proto = currentInstruction[4].u.operand;
370 // Load the operands (baseVal, proto, and value respectively) into registers.
371 // We use regT0 for baseVal since we will be done with this first, and we can then use it for the result.
372 emitGetVirtualRegister(value, regT2);
373 emitGetVirtualRegister(baseVal, regT0);
374 emitGetVirtualRegister(proto, regT1);
376 // Check that proto are cells. baseVal must be a cell - this is checked by op_check_has_instance.
377 emitJumpSlowCaseIfNotJSCell(regT2, value);
378 emitJumpSlowCaseIfNotJSCell(regT1, proto);
380 // Check that prototype is an object
381 loadPtr(Address(regT1, JSCell::structureOffset()), regT3);
382 addSlowCase(emitJumpIfNotObject(regT3));
384 // Fixme: this check is only needed because the JSC API allows HasInstance to be overridden; we should deprecate this.
385 // Check that baseVal 'ImplementsDefaultHasInstance'.
386 loadPtr(Address(regT0, JSCell::structureOffset()), regT0);
387 addSlowCase(branchTest8(Zero, Address(regT0, Structure::typeInfoFlagsOffset()), TrustedImm32(ImplementsDefaultHasInstance)));
389 // Optimistically load the result true, and start looping.
390 // Initially, regT1 still contains proto and regT2 still contains value.
391 // As we loop regT2 will be updated with its prototype, recursively walking the prototype chain.
392 move(TrustedImmPtr(JSValue::encode(jsBoolean(true))), regT0);
// NOTE(review): `Label loop(this);` appears to have been lost here (numbering
// jumps 392 -> 395); `loop` is referenced by the linkTo below — confirm.
395 // Load the prototype of the object in regT2. If this is equal to regT1 - WIN!
396 // Otherwise, check if we've hit null - if we have then drop out of the loop, if not go again.
397 loadPtr(Address(regT2, JSCell::structureOffset()), regT2);
398 loadPtr(Address(regT2, Structure::prototypeOffset()), regT2);
399 Jump isInstance = branchPtr(Equal, regT2, regT1);
400 emitJumpIfJSCell(regT2).linkTo(loop, this);
402 // We get here either by dropping out of the loop, or if value was not an Object. Result is false.
403 move(TrustedImmPtr(JSValue::encode(jsBoolean(false))), regT0);
405 // isInstance jumps right down to here, to skip setting the result to false (it has already set true).
406 isInstance.link(this);
407 emitPutVirtualRegister(dst);
410 void JIT::emit_op_call(Instruction* currentInstruction)
412 compileOpCall(op_call, currentInstruction, m_callLinkInfoIndex++);
415 void JIT::emit_op_call_eval(Instruction* currentInstruction)
417 compileOpCall(op_call_eval, currentInstruction, m_callLinkInfoIndex++);
420 void JIT::emit_op_call_varargs(Instruction* currentInstruction)
422 compileOpCallVarargs(currentInstruction);
425 void JIT::emit_op_construct(Instruction* currentInstruction)
427 compileOpCall(op_construct, currentInstruction, m_callLinkInfoIndex++);
// op_tear_off_activation: if an activation (or arguments object) was created,
// copy the registers it captures off the stack via the stub; otherwise skip.
430 void JIT::emit_op_tear_off_activation(Instruction* currentInstruction)
432 unsigned activation = currentInstruction[1].u.operand;
433 unsigned arguments = currentInstruction[2].u.operand;
434 Jump activationCreated = branchTestPtr(NonZero, addressFor(activation));
435 Jump argumentsNotCreated = branchTestPtr(Zero, addressFor(arguments));
436 activationCreated.link(this);
437 JITStubCall stubCall(this, cti_op_tear_off_activation);
438 stubCall.addArgument(activation, regT2);
439 stubCall.addArgument(unmodifiedArgumentsRegister(arguments), regT2);
// NOTE(review): `stubCall.call();` appears to have been lost here (numbering
// jumps 439 -> 441) — without it the stub is never invoked; confirm.
441 argumentsNotCreated.link(this);
// op_tear_off_arguments: if an arguments object was materialized, detach it
// from the stack via the stub before the frame is torn down.
444 void JIT::emit_op_tear_off_arguments(Instruction* currentInstruction)
446 unsigned dst = currentInstruction[1].u.operand;
448 Jump argsNotCreated = branchTestPtr(Zero, Address(callFrameRegister, sizeof(Register) * (unmodifiedArgumentsRegister(dst))));
449 JITStubCall stubCall(this, cti_op_tear_off_arguments);
450 stubCall.addArgument(unmodifiedArgumentsRegister(dst), regT2);
// NOTE(review): `stubCall.call();` appears to have been lost here (numbering
// jumps 450 -> 452) — confirm against the pristine file.
452 argsNotCreated.link(this);
// op_ret: return a value to the calling JS frame — load the result, grab the
// saved ReturnPC, restore the caller's frame register, and return.
455 void JIT::emit_op_ret(Instruction* currentInstruction)
457 emitOptimizationCheck(RetOptimizationCheck);
459 ASSERT(callFrameRegister != regT1);
460 ASSERT(regT1 != returnValueRegister);
461 ASSERT(returnValueRegister != callFrameRegister);
463 // Return the result in %eax.
464 emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueRegister);
466 // Grab the return address.
467 emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);
469 // Restore our caller's "r".
470 emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);
473 restoreReturnAddressBeforeReturn(regT1);
// NOTE(review): the terminating `ret();` appears to have been lost in
// extraction (numbering jumps 473 -> 477) — confirm against the pristine file.
// op_ret_object_or_this: like op_ret, but if the nominal result is not an
// object, return the `this` value (operand 2) instead — used for constructors.
477 void JIT::emit_op_ret_object_or_this(Instruction* currentInstruction)
479 emitOptimizationCheck(RetOptimizationCheck);
481 ASSERT(callFrameRegister != regT1);
482 ASSERT(regT1 != returnValueRegister);
483 ASSERT(returnValueRegister != callFrameRegister);
485 // Return the result in %eax.
486 emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueRegister);
487 Jump notJSCell = emitJumpIfNotJSCell(returnValueRegister);
488 loadPtr(Address(returnValueRegister, JSCell::structureOffset()), regT2);
489 Jump notObject = emitJumpIfNotObject(regT2);
491 // Grab the return address.
492 emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);
494 // Restore our caller's "r".
495 emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);
498 restoreReturnAddressBeforeReturn(regT1);
// NOTE(review): a `ret();` appears to have been lost here (numbering jumps
// 498 -> 501) — without it this path would fall into the `this` path; confirm.
501 // Return 'this' in %eax.
502 notJSCell.link(this);
503 notObject.link(this);
504 emitGetVirtualRegister(currentInstruction[2].u.operand, returnValueRegister);
506 // Grab the return address.
507 emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);
509 // Restore our caller's "r".
510 emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);
513 restoreReturnAddressBeforeReturn(regT1);
// NOTE(review): the terminating `ret();` also appears to be missing here
// (numbering jumps 513 -> 517) — confirm against the pristine file.
517 void JIT::emit_op_resolve(Instruction* currentInstruction)
519 JITStubCall stubCall(this, cti_op_resolve);
520 stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
521 stubCall.call(currentInstruction[1].u.operand);
// op_to_primitive: immediates and strings are already primitive and pass
// through unchanged; any other cell goes to the slow case.
524 void JIT::emit_op_to_primitive(Instruction* currentInstruction)
526 int dst = currentInstruction[1].u.operand;
527 int src = currentInstruction[2].u.operand;
529 emitGetVirtualRegister(src, regT0);
531 Jump isImm = emitJumpIfNotJSCell(regT0);
532 addSlowCase(branchPtr(NotEqual, Address(regT0), TrustedImmPtr(m_globalData->jsStringVPtr)));
// NOTE(review): `isImm.link(this);` appears to have been lost here (numbering
// jumps 532 -> 536; `isImm` is otherwise never linked) — confirm.
536 emitPutVirtualRegister(dst);
540 void JIT::emit_op_strcat(Instruction* currentInstruction)
542 JITStubCall stubCall(this, cti_op_strcat);
543 stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
544 stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
545 stubCall.call(currentInstruction[1].u.operand);
548 void JIT::emit_op_resolve_base(Instruction* currentInstruction)
550 JITStubCall stubCall(this, currentInstruction[3].u.operand ? cti_op_resolve_base_strict_put : cti_op_resolve_base);
551 stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
552 stubCall.call(currentInstruction[1].u.operand);
555 void JIT::emit_op_ensure_property_exists(Instruction* currentInstruction)
557 JITStubCall stubCall(this, cti_op_ensure_property_exists);
558 stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
559 stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
560 stubCall.call(currentInstruction[1].u.operand);
563 void JIT::emit_op_resolve_skip(Instruction* currentInstruction)
565 JITStubCall stubCall(this, cti_op_resolve_skip);
566 stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
567 stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
568 stubCall.call(currentInstruction[1].u.operand);
571 void JIT::emit_op_resolve_global(Instruction* currentInstruction, bool)
574 void* globalObject = m_codeBlock->globalObject();
575 unsigned currentIndex = m_globalResolveInfoIndex++;
576 GlobalResolveInfo* resolveInfoAddress = &(m_codeBlock->globalResolveInfo(currentIndex));
578 // Check Structure of global object
579 move(TrustedImmPtr(globalObject), regT0);
580 move(TrustedImmPtr(resolveInfoAddress), regT2);
581 loadPtr(Address(regT2, OBJECT_OFFSETOF(GlobalResolveInfo, structure)), regT1);
582 addSlowCase(branchPtr(NotEqual, regT1, Address(regT0, JSCell::structureOffset()))); // Structures don't match
584 // Load cached property
585 // Assume that the global object always uses external storage.
586 loadPtr(Address(regT0, OBJECT_OFFSETOF(JSGlobalObject, m_propertyStorage)), regT0);
587 load32(Address(regT2, OBJECT_OFFSETOF(GlobalResolveInfo, offset)), regT1);
588 loadPtr(BaseIndex(regT0, regT1, ScalePtr), regT0);
589 emitPutVirtualRegister(currentInstruction[1].u.operand);
// Slow path for op_resolve_global: the cached Structure check failed, so
// resolve through the stub, which also refreshes the GlobalResolveInfo cache.
592 void JIT::emitSlow_op_resolve_global(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
594 unsigned dst = currentInstruction[1].u.operand;
595 Identifier* ident = &m_codeBlock->identifier(currentInstruction[2].u.operand);
597 unsigned currentIndex = m_globalResolveInfoIndex++;
// NOTE(review): `linkSlowCase(iter);` appears to have been lost here
// (numbering jumps 597 -> 600) — confirm against the pristine file.
600 JITStubCall stubCall(this, cti_op_resolve_global);
601 stubCall.addArgument(TrustedImmPtr(ident));
602 stubCall.addArgument(Imm32(currentIndex));
603 stubCall.addArgument(regT0);
// NOTE(review): `stubCall.call(dst);` appears to have been lost here
// (numbering jumps 603 -> 607; `dst` is otherwise unused) — confirm.
607 void JIT::emit_op_not(Instruction* currentInstruction)
609 emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
611 // Invert against JSValue(false); if the value was tagged as a boolean, then all bits will be
612 // clear other than the low bit (which will be 0 or 1 for false or true inputs respectively).
613 // Then invert against JSValue(true), which will add the tag back in, and flip the low bit.
614 xorPtr(TrustedImm32(static_cast<int32_t>(ValueFalse)), regT0);
615 addSlowCase(branchTestPtr(NonZero, regT0, TrustedImm32(static_cast<int32_t>(~1))));
616 xorPtr(TrustedImm32(static_cast<int32_t>(ValueTrue)), regT0);
618 emitPutVirtualRegister(currentInstruction[1].u.operand);
621 void JIT::emit_op_jfalse(Instruction* currentInstruction)
623 unsigned target = currentInstruction[2].u.operand;
624 emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
626 addJump(branchPtr(Equal, regT0, TrustedImmPtr(JSValue::encode(jsNumber(0)))), target);
627 Jump isNonZero = emitJumpIfImmediateInteger(regT0);
629 addJump(branchPtr(Equal, regT0, TrustedImmPtr(JSValue::encode(jsBoolean(false)))), target);
630 addSlowCase(branchPtr(NotEqual, regT0, TrustedImmPtr(JSValue::encode(jsBoolean(true)))));
632 isNonZero.link(this);
635 void JIT::emit_op_jeq_null(Instruction* currentInstruction)
637 unsigned src = currentInstruction[1].u.operand;
638 unsigned target = currentInstruction[2].u.operand;
640 emitGetVirtualRegister(src, regT0);
641 Jump isImmediate = emitJumpIfNotJSCell(regT0);
643 // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
644 loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
645 addJump(branchTest8(NonZero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined)), target);
646 Jump wasNotImmediate = jump();
648 // Now handle the immediate cases - undefined & null
649 isImmediate.link(this);
650 andPtr(TrustedImm32(~TagBitUndefined), regT0);
651 addJump(branchPtr(Equal, regT0, TrustedImmPtr(JSValue::encode(jsNull()))), target);
653 wasNotImmediate.link(this);
655 void JIT::emit_op_jneq_null(Instruction* currentInstruction)
657 unsigned src = currentInstruction[1].u.operand;
658 unsigned target = currentInstruction[2].u.operand;
660 emitGetVirtualRegister(src, regT0);
661 Jump isImmediate = emitJumpIfNotJSCell(regT0);
663 // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
664 loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
665 addJump(branchTest8(Zero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined)), target);
666 Jump wasNotImmediate = jump();
668 // Now handle the immediate cases - undefined & null
669 isImmediate.link(this);
670 andPtr(TrustedImm32(~TagBitUndefined), regT0);
671 addJump(branchPtr(NotEqual, regT0, TrustedImmPtr(JSValue::encode(jsNull()))), target);
673 wasNotImmediate.link(this);
676 void JIT::emit_op_jneq_ptr(Instruction* currentInstruction)
678 unsigned src = currentInstruction[1].u.operand;
679 JSCell* ptr = currentInstruction[2].u.jsCell.get();
680 unsigned target = currentInstruction[3].u.operand;
682 emitGetVirtualRegister(src, regT0);
683 addJump(branchPtr(NotEqual, regT0, TrustedImmPtr(JSValue::encode(JSValue(ptr)))), target);
686 void JIT::emit_op_jsr(Instruction* currentInstruction)
688 int retAddrDst = currentInstruction[1].u.operand;
689 int target = currentInstruction[2].u.operand;
690 DataLabelPtr storeLocation = storePtrWithPatch(TrustedImmPtr(0), Address(callFrameRegister, sizeof(Register) * retAddrDst));
691 addJump(jump(), target);
692 m_jsrSites.append(JSRInfo(storeLocation, label()));
693 killLastResultRegister();
696 void JIT::emit_op_sret(Instruction* currentInstruction)
698 jump(Address(callFrameRegister, sizeof(Register) * currentInstruction[1].u.operand));
699 killLastResultRegister();
702 void JIT::emit_op_eq(Instruction* currentInstruction)
704 emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
705 emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
706 compare32(Equal, regT1, regT0, regT0);
707 emitTagAsBoolImmediate(regT0);
708 emitPutVirtualRegister(currentInstruction[1].u.operand);
// op_bitnot: bitwise NOT of an immediate integer; others go to the slow case.
711 void JIT::emit_op_bitnot(Instruction* currentInstruction)
713 emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
714 emitJumpSlowCaseIfNotImmediateInteger(regT0);
// NOTE(review): the instruction performing the actual bitwise NOT appears to
// have been lost here (numbering jumps 714 -> 716) — as written the value is
// only re-tagged; confirm against the pristine file.
716 emitFastArithIntToImmNoCheck(regT0, regT0);
717 emitPutVirtualRegister(currentInstruction[1].u.operand);
720 void JIT::emit_op_resolve_with_base(Instruction* currentInstruction)
722 JITStubCall stubCall(this, cti_op_resolve_with_base);
723 stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[3].u.operand)));
724 stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
725 stubCall.call(currentInstruction[2].u.operand);
728 void JIT::emit_op_resolve_with_this(Instruction* currentInstruction)
730 JITStubCall stubCall(this, cti_op_resolve_with_this);
731 stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[3].u.operand)));
732 stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
733 stubCall.call(currentInstruction[2].u.operand);
// op_jtrue: branch to `target` when the operand is truthy (non-zero integer
// or true); the number 0 and false fall through; others go to the slow case.
736 void JIT::emit_op_jtrue(Instruction* currentInstruction)
738 unsigned target = currentInstruction[2].u.operand;
739 emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
741 Jump isZero = branchPtr(Equal, regT0, TrustedImmPtr(JSValue::encode(jsNumber(0))));
742 addJump(emitJumpIfImmediateInteger(regT0), target);
744 addJump(branchPtr(Equal, regT0, TrustedImmPtr(JSValue::encode(jsBoolean(true)))), target);
745 addSlowCase(branchPtr(NotEqual, regT0, TrustedImmPtr(JSValue::encode(jsBoolean(false)))));
// NOTE(review): `isZero.link(this);` appears to have been lost here (numbering
// jumps 745 -> 750; `isZero` is otherwise never linked) — confirm.
750 void JIT::emit_op_neq(Instruction* currentInstruction)
752 emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
753 emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
754 compare32(NotEqual, regT1, regT0, regT0);
755 emitTagAsBoolImmediate(regT0);
757 emitPutVirtualRegister(currentInstruction[1].u.operand);
761 void JIT::emit_op_bitxor(Instruction* currentInstruction)
763 emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
764 emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
765 xorPtr(regT1, regT0);
766 emitFastArithReTagImmediate(regT0, regT0);
767 emitPutVirtualRegister(currentInstruction[1].u.operand);
// op_bitor: bitwise OR of two immediate integers (OR preserves the tag, so no
// re-tagging is needed); non-integers go to the slow case.
770 void JIT::emit_op_bitor(Instruction* currentInstruction)
772 emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
773 emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
// NOTE(review): `orPtr(regT1, regT0);` appears to have been lost here
// (numbering jumps 773 -> 775) — confirm against the pristine file.
775 emitPutVirtualRegister(currentInstruction[1].u.operand);
// op_throw: hand the exception value to the stub, which rewrites the return
// address to the nearest handler; execution never resumes after the call.
778 void JIT::emit_op_throw(Instruction* currentInstruction)
780 JITStubCall stubCall(this, cti_op_throw);
781 stubCall.addArgument(currentInstruction[1].u.operand, regT2);
// NOTE(review): `stubCall.call();` appears to have been lost here (numbering
// jumps 781 -> 783) — confirm against the pristine file.
783 ASSERT(regT0 == returnValueRegister);
785 // cti_op_throw always changes it's return address,
786 // this point in the code should never be reached.
// NOTE(review): an unreachable-marker instruction (e.g. `breakpoint();`)
// appears to have been lost after this comment (numbering jumps 786 -> 791).
// op_get_pnames: obtain a JSPropertyNameIterator for `base` via the stub,
// initialize the loop counter (i) and bound (size); null/undefined bases jump
// straight to breakTarget, and other non-objects are first coerced to objects.
791 void JIT::emit_op_get_pnames(Instruction* currentInstruction)
793 int dst = currentInstruction[1].u.operand;
794 int base = currentInstruction[2].u.operand;
795 int i = currentInstruction[3].u.operand;
796 int size = currentInstruction[4].u.operand;
797 int breakTarget = currentInstruction[5].u.operand;
799 JumpList isNotObject;
801 emitGetVirtualRegister(base, regT0);
802 if (!m_codeBlock->isKnownNotImmediate(base))
803 isNotObject.append(emitJumpIfNotJSCell(regT0));
804 if (base != m_codeBlock->thisRegister() || m_codeBlock->isStrictMode()) {
805 loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
806 isNotObject.append(emitJumpIfNotObject(regT2));
809 // We could inline the case where you have a valid cache, but
810 // this call doesn't seem to be hot.
811 Label isObject(this);
812 JITStubCall getPnamesStubCall(this, cti_op_get_pnames);
813 getPnamesStubCall.addArgument(regT0);
814 getPnamesStubCall.call(dst);
815 load32(Address(regT0, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStringsSize)), regT3);
816 storePtr(tagTypeNumberRegister, payloadFor(i));
817 store32(TrustedImm32(Int32Tag), intTagFor(size));
818 store32(regT3, intPayloadFor(size));
// NOTE(review): a `Jump end = jump();` skipping the not-object path appears to
// have been lost here (numbering jumps 818 -> 821) — confirm.
821 isNotObject.link(this);
// NOTE(review): `move(regT0, regT1);` appears to have been lost here
// (numbering jumps 821 -> 823; regT1 is otherwise uninitialized below).
823 and32(TrustedImm32(~TagBitUndefined), regT1);
824 addJump(branch32(Equal, regT1, TrustedImm32(ValueNull)), breakTarget);
826 JITStubCall toObjectStubCall(this, cti_to_object);
827 toObjectStubCall.addArgument(regT0);
828 toObjectStubCall.call(base);
829 jump().linkTo(isObject, this);
// NOTE(review): the function's tail (e.g. `end.link(this);`) appears to have
// been lost (numbering jumps 829 -> 834) — confirm against the pristine file.
// op_next_pname: fetch the next enumerable property name for a for-in loop.
// Fast path verifies the iterator's cached structure/prototype chain still
// matches the base object; on mismatch, falls back to cti_has_property.
834 void JIT::emit_op_next_pname(Instruction* currentInstruction)
836 int dst = currentInstruction[1].u.operand;
837 int base = currentInstruction[2].u.operand;
838 int i = currentInstruction[3].u.operand;
839 int size = currentInstruction[4].u.operand;
840 int it = currentInstruction[5].u.operand;
841 int target = currentInstruction[6].u.operand;
843 JumpList callHasProperty;
// NOTE(review): the 'begin' label definition (referenced at the bottom of this
// function, original ~line 845) appears to have been dropped by extraction.
// Loop termination: i == size means enumeration is done.
846 load32(intPayloadFor(i), regT0);
847 Jump end = branch32(Equal, regT0, intPayloadFor(size));
// Load the i-th cached JSString* from the iterator into dst.
850 loadPtr(addressFor(it), regT1);
851 loadPtr(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStrings)), regT2);
853 loadPtr(BaseIndex(regT2, regT0, TimesEight), regT2);
855 emitPutVirtualRegister(dst, regT2);
858 add32(TrustedImm32(1), regT0);
859 store32(regT0, intPayloadFor(i));
861 // Verify that i is valid:
862 emitGetVirtualRegister(base, regT0);
864 // Test base's structure
865 loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
866 callHasProperty.append(branchPtr(NotEqual, regT2, Address(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedStructure)))));
868 // Test base's prototype chain
869 loadPtr(Address(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedPrototypeChain))), regT3);
870 loadPtr(Address(regT3, OBJECT_OFFSETOF(StructureChain, m_vector)), regT3);
// Empty chain vector => nothing more to validate; take the loop-body branch.
871 addJump(branchTestPtr(Zero, Address(regT3)), target);
// Walk the prototype chain, comparing each prototype's structure against the
// cached vector entry; any mismatch (or non-cell prototype) goes slow.
873 Label checkPrototype(this);
874 loadPtr(Address(regT2, Structure::prototypeOffset()), regT2);
875 callHasProperty.append(emitJumpIfNotJSCell(regT2));
876 loadPtr(Address(regT2, JSCell::structureOffset()), regT2);
877 callHasProperty.append(branchPtr(NotEqual, regT2, Address(regT3)));
878 addPtr(TrustedImm32(sizeof(Structure*)), regT3);
879 branchTestPtr(NonZero, Address(regT3)).linkTo(checkPrototype, this);
// Caches all match: the name is definitely still a valid key.
882 addJump(jump(), target);
884 // Slow case: Ask the object if i is valid.
885 callHasProperty.link(this);
886 emitGetVirtualRegister(dst, regT1);
887 JITStubCall stubCall(this, cti_has_property);
888 stubCall.addArgument(regT0);
889 stubCall.addArgument(regT1);
// NOTE(review): stubCall.call() (original ~line 890) appears to have been
// dropped by extraction.
892 // Test for valid key.
893 addJump(branchTest32(NonZero, regT0), target);
// Key was shadowed/deleted: advance to the next name (loop back to 'begin').
894 jump().linkTo(begin, this);
// NOTE(review): end.link(this) (original ~line 896) appears dropped by extraction.
// op_push_scope: push an object onto the scope chain (e.g. for 'with').
// Entirely delegated to the runtime stub.
900 void JIT::emit_op_push_scope(Instruction* currentInstruction)
902 JITStubCall stubCall(this, cti_op_push_scope);
903 stubCall.addArgument(currentInstruction[1].u.operand, regT2);
904 stubCall.call(currentInstruction[1].u.operand);
// op_pop_scope: pop the innermost scope-chain node; no operands, no result.
907 void JIT::emit_op_pop_scope(Instruction*)
909 JITStubCall(this, cti_op_pop_scope).call();
// Shared fast path for op_stricteq / op_nstricteq. Handles only the case where
// neither operand is a number and not both are cells; everything else goes slow.
912 void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
914 unsigned dst = currentInstruction[1].u.operand;
915 unsigned src1 = currentInstruction[2].u.operand;
916 unsigned src2 = currentInstruction[3].u.operand;
918 emitGetVirtualRegisters(src1, regT0, src2, regT1);
920 // Jump to a slow case if either operand is a number, or if both are JSCell*s.
// NOTE(review): the setup that combines the operands into regT2 (original
// lines 921-922, presumably move/orPtr) appears dropped by extraction — the
// two checks below test that combined value.
923 addSlowCase(emitJumpIfJSCell(regT2));
924 addSlowCase(emitJumpIfImmediateNumber(regT2));
// For non-number immediates, strict (in)equality is plain bit comparison.
926 if (type == OpStrictEq)
927 compare32(Equal, regT1, regT0, regT0);
// NOTE(review): the 'else' keyword (original line 928) appears dropped by extraction.
929 compare32(NotEqual, regT1, regT0, regT0);
930 emitTagAsBoolImmediate(regT0);
932 emitPutVirtualRegister(dst);
// op_stricteq: thin wrapper over the shared strict-equality emitter.
935 void JIT::emit_op_stricteq(Instruction* currentInstruction)
937 compileOpStrictEq(currentInstruction, OpStrictEq);
// op_nstricteq: thin wrapper over the shared strict-equality emitter (negated).
940 void JIT::emit_op_nstricteq(Instruction* currentInstruction)
942 compileOpStrictEq(currentInstruction, OpNStrictEq);
// op_to_jsnumber: fast path passes values that are already numbers through
// unchanged (immediate ints, or cells whose type is NumberType); anything else
// takes the slow case.
945 void JIT::emit_op_to_jsnumber(Instruction* currentInstruction)
947 int srcVReg = currentInstruction[2].u.operand;
948 emitGetVirtualRegister(srcVReg, regT0);
950 Jump wasImmediate = emitJumpIfImmediateInteger(regT0);
952 emitJumpSlowCaseIfNotJSCell(regT0, srcVReg);
953 loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
954 addSlowCase(branch8(NotEqual, Address(regT2, Structure::typeInfoTypeOffset()), TrustedImm32(NumberType)));
956 wasImmediate.link(this);
958 emitPutVirtualRegister(currentInstruction[1].u.operand);
// op_push_new_scope: push a new static scope binding identifier -> value
// (used for named function expressions / catch-like scopes). Stub-only.
961 void JIT::emit_op_push_new_scope(Instruction* currentInstruction)
963 JITStubCall stubCall(this, cti_op_push_new_scope);
964 stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
965 stubCall.addArgument(currentInstruction[3].u.operand, regT2);
966 stubCall.call(currentInstruction[1].u.operand);
// op_catch: entry point after an exception unwinds here. regT0 holds the
// call frame on entry; the pending exception is read from (and cleared in)
// the global data, then stored into the catch variable.
969 void JIT::emit_op_catch(Instruction* currentInstruction)
971 killLastResultRegister(); // FIXME: Implicitly treat op_catch as a labeled statement, and remove this line of code.
972 move(regT0, callFrameRegister);
973 peek(regT3, OBJECT_OFFSETOF(struct JITStackFrame, globalData) / sizeof(void*));
974 loadPtr(Address(regT3, OBJECT_OFFSETOF(JSGlobalData, exception)), regT0);
// Clear the exception slot so it is not re-thrown.
975 storePtr(TrustedImmPtr(JSValue::encode(JSValue())), Address(regT3, OBJECT_OFFSETOF(JSGlobalData, exception)));
976 emitPutVirtualRegister(currentInstruction[1].u.operand);
// op_jmp_scopes: pop 'count' scopes (operand 1) via the stub, then jump to the
// target bytecode offset (operand 2).
979 void JIT::emit_op_jmp_scopes(Instruction* currentInstruction)
981 JITStubCall stubCall(this, cti_op_jmp_scopes);
982 stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
// NOTE(review): stubCall.call() (original ~line 983) appears dropped by extraction.
984 addJump(jump(), currentInstruction[2].u.operand);
// op_switch_imm: integer switch. Records the jump table so link time can fill
// in CTI offsets, then asks the stub to compute the destination.
987 void JIT::emit_op_switch_imm(Instruction* currentInstruction)
989 unsigned tableIndex = currentInstruction[1].u.operand;
990 unsigned defaultOffset = currentInstruction[2].u.operand;
991 unsigned scrutinee = currentInstruction[3].u.operand;
993 // create jump table for switch destinations, track this switch statement.
994 SimpleJumpTable* jumpTable = &m_codeBlock->immediateSwitchJumpTable(tableIndex);
995 m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Immediate));
996 jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());
998 JITStubCall stubCall(this, cti_op_switch_imm);
999 stubCall.addArgument(scrutinee, regT2);
1000 stubCall.addArgument(Imm32(tableIndex));
// NOTE(review): the stub call and indirect jump through its result (original
// ~lines 1001-1002) appear dropped by extraction.
// op_switch_char: single-character string switch; same shape as switch_imm but
// uses the character jump table and cti_op_switch_char.
1005 void JIT::emit_op_switch_char(Instruction* currentInstruction)
1007 unsigned tableIndex = currentInstruction[1].u.operand;
1008 unsigned defaultOffset = currentInstruction[2].u.operand;
1009 unsigned scrutinee = currentInstruction[3].u.operand;
1011 // create jump table for switch destinations, track this switch statement.
1012 SimpleJumpTable* jumpTable = &m_codeBlock->characterSwitchJumpTable(tableIndex);
1013 m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Character));
1014 jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());
1016 JITStubCall stubCall(this, cti_op_switch_char);
1017 stubCall.addArgument(scrutinee, regT2);
1018 stubCall.addArgument(Imm32(tableIndex));
// NOTE(review): the stub call and indirect jump through its result (original
// ~lines 1019-1020) appear dropped by extraction.
// op_switch_string: string switch; StringJumpTable variant of the above.
1023 void JIT::emit_op_switch_string(Instruction* currentInstruction)
1025 unsigned tableIndex = currentInstruction[1].u.operand;
1026 unsigned defaultOffset = currentInstruction[2].u.operand;
1027 unsigned scrutinee = currentInstruction[3].u.operand;
1029 // create jump table for switch destinations, track this switch statement.
1030 StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
1031 m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset));
1033 JITStubCall stubCall(this, cti_op_switch_string);
1034 stubCall.addArgument(scrutinee, regT2);
1035 stubCall.addArgument(Imm32(tableIndex));
// NOTE(review): the stub call and indirect jump through its result (original
// ~lines 1036-1037) appear dropped by extraction.
// op_throw_reference_error: throw a ReferenceError whose message is a constant
// from the code block. Stub-only; control does not return normally.
1040 void JIT::emit_op_throw_reference_error(Instruction* currentInstruction)
1042 JITStubCall stubCall(this, cti_op_throw_reference_error);
1043 stubCall.addArgument(ImmPtr(JSValue::encode(m_codeBlock->getConstant(currentInstruction[1].u.operand))));
// NOTE(review): stubCall.call() (original ~line 1044) appears dropped by extraction.
// op_debug: debugger hook. With DEBUG_WITH_BREAKPOINT the instruction is a
// hardware breakpoint; otherwise it calls the debug stub with the hook id,
// first line, and last line operands.
1047 void JIT::emit_op_debug(Instruction* currentInstruction)
1049 #if ENABLE(DEBUG_WITH_BREAKPOINT)
1050 UNUSED_PARAM(currentInstruction);
// NOTE(review): the breakpoint() emission and the #else (original ~lines
// 1051-1052) appear dropped by extraction.
1053 JITStubCall stubCall(this, cti_op_debug);
1054 stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
1055 stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
1056 stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
// NOTE(review): stubCall.call() and the closing #endif (original ~lines
// 1057-1058) appear dropped by extraction.
// op_eq_null: dst = (src1 == null) under loose equality. For cells this is the
// MasqueradesAsUndefined flag; for immediates, null and undefined are folded
// together by masking TagBitUndefined and comparing against ValueNull.
1061 void JIT::emit_op_eq_null(Instruction* currentInstruction)
1063 unsigned dst = currentInstruction[1].u.operand;
1064 unsigned src1 = currentInstruction[2].u.operand;
1066 emitGetVirtualRegister(src1, regT0);
1067 Jump isImmediate = emitJumpIfNotJSCell(regT0);
1069 loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
1070 test8(NonZero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined), regT0);
1072 Jump wasNotImmediate = jump();
1074 isImmediate.link(this);
1076 andPtr(TrustedImm32(~TagBitUndefined), regT0);
1077 comparePtr(Equal, regT0, TrustedImm32(ValueNull), regT0);
1079 wasNotImmediate.link(this);
1081 emitTagAsBoolImmediate(regT0);
1082 emitPutVirtualRegister(dst);
// op_neq_null: exact mirror of op_eq_null with the comparison senses inverted
// (Zero test on the masquerade flag, NotEqual on the immediate compare).
1086 void JIT::emit_op_neq_null(Instruction* currentInstruction)
1088 unsigned dst = currentInstruction[1].u.operand;
1089 unsigned src1 = currentInstruction[2].u.operand;
1091 emitGetVirtualRegister(src1, regT0);
1092 Jump isImmediate = emitJumpIfNotJSCell(regT0);
1094 loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
1095 test8(Zero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined), regT0);
1097 Jump wasNotImmediate = jump();
1099 isImmediate.link(this);
1101 andPtr(TrustedImm32(~TagBitUndefined), regT0);
1102 comparePtr(NotEqual, regT0, TrustedImm32(ValueNull), regT0);
1104 wasNotImmediate.link(this);
1106 emitTagAsBoolImmediate(regT0);
1107 emitPutVirtualRegister(dst);
// op_enter: function prologue — zero-initialize all local variable registers.
1110 void JIT::emit_op_enter(Instruction*)
1112 // Even though CTI doesn't use them, we initialize our constant
1113 // registers to zap stale pointers, to avoid unnecessarily prolonging
1114 // object lifetime and increasing GC pressure.
1115 size_t count = m_codeBlock->m_numVars;
1116 for (size_t j = 0; j < count; ++j)
1117 emitInitRegister(j);
// op_create_activation: lazily create the activation object; skip the stub
// call if the dst register is already non-null (activation exists).
1121 void JIT::emit_op_create_activation(Instruction* currentInstruction)
1123 unsigned dst = currentInstruction[1].u.operand;
1125 Jump activationCreated = branchTestPtr(NonZero, Address(callFrameRegister, sizeof(Register) * dst));
1126 JITStubCall(this, cti_op_push_activation).call(currentInstruction[1].u.operand);
1127 emitPutVirtualRegister(dst);
1128 activationCreated.link(this);
// op_create_arguments: lazily create the arguments object; skip if already
// created. The result is stored in both the modified and the unmodified
// arguments registers.
1131 void JIT::emit_op_create_arguments(Instruction* currentInstruction)
1133 unsigned dst = currentInstruction[1].u.operand;
1135 Jump argsCreated = branchTestPtr(NonZero, Address(callFrameRegister, sizeof(Register) * dst));
// Special-cased stub when the function has no named parameters.
1136 if (m_codeBlock->m_numParameters == 1)
1137 JITStubCall(this, cti_op_create_arguments_no_params).call();
// NOTE(review): the 'else' keyword (original line 1138) appears dropped by extraction.
1139 JITStubCall(this, cti_op_create_arguments).call();
1140 emitPutVirtualRegister(dst);
1141 emitPutVirtualRegister(unmodifiedArgumentsRegister(dst));
1142 argsCreated.link(this);
// op_init_lazy_reg: null out a lazily-materialized register (e.g. arguments /
// activation slot) so later "already created?" checks see zero.
1145 void JIT::emit_op_init_lazy_reg(Instruction* currentInstruction)
1147 unsigned dst = currentInstruction[1].u.operand;
1149 storePtr(TrustedImmPtr(0), Address(callFrameRegister, sizeof(Register) * dst));
// op_convert_this fast path: 'this' must be a cell and must not be a string
// (strings need boxing); both failures go to the slow case.
1152 void JIT::emit_op_convert_this(Instruction* currentInstruction)
1154 emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
1156 emitJumpSlowCaseIfNotJSCell(regT0);
1157 addSlowCase(branchPtr(Equal, Address(regT0), TrustedImmPtr(m_globalData->jsStringVPtr)));
// op_get_callee: read the Callee slot out of the call frame header.
1160 void JIT::emit_op_get_callee(Instruction* currentInstruction)
1162 unsigned result = currentInstruction[1].u.operand;
1163 emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, regT0);
1164 emitPutVirtualRegister(result);
// op_create_this fast path: inline-allocate the new object using the
// prototype's cached inheritorID structure. Slow cases: prototype is not a
// cell, not an object, has no inheritorID, or allocation fails.
1167 void JIT::emit_op_create_this(Instruction* currentInstruction)
1169 emitGetVirtualRegister(currentInstruction[2].u.operand, regT2);
1170 emitJumpSlowCaseIfNotJSCell(regT2, currentInstruction[2].u.operand);
1171 loadPtr(Address(regT2, JSCell::structureOffset()), regT1);
1172 addSlowCase(emitJumpIfNotObject(regT1));
1174 // now we know that the prototype is an object, but we don't know if it's got an
// (inheritorID — the cached Structure for objects created with this prototype)
1177 loadPtr(Address(regT2, JSObject::offsetOfInheritorID()), regT2);
1178 addSlowCase(branchTestPtr(Zero, regT2));
1180 // now regT2 contains the inheritorID, which is the structure that the newly
1181 // allocated object will have.
1183 emitAllocateJSFinalObject(regT2, regT0, regT1);
1185 emitPutVirtualRegister(currentInstruction[1].u.operand);
// Slow path for op_create_this: link the four slow cases emitted by the fast
// path (in emission order) and fall back to the runtime stub.
1188 void JIT::emitSlow_op_create_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1190 linkSlowCaseIfNotJSCell(iter, currentInstruction[2].u.operand); // not a cell
1191 linkSlowCase(iter); // not an object
1192 linkSlowCase(iter); // doesn't have an inheritor ID
1193 linkSlowCase(iter); // allocation failed
1194 JITStubCall stubCall(this, cti_op_create_this);
1195 stubCall.addArgument(currentInstruction[2].u.operand, regT1);
1196 stubCall.call(currentInstruction[1].u.operand);
// op_profile_will_call: call the profiler hook only if a profiler is enabled
// (checked via the enabledProfilerReference on the JIT stack frame).
1199 void JIT::emit_op_profile_will_call(Instruction* currentInstruction)
1201 peek(regT1, OBJECT_OFFSETOF(JITStackFrame, enabledProfilerReference) / sizeof(void*));
1202 Jump noProfiler = branchTestPtr(Zero, Address(regT1));
1204 JITStubCall stubCall(this, cti_op_profile_will_call);
1205 stubCall.addArgument(currentInstruction[1].u.operand, regT1);
// NOTE(review): stubCall.call() (original ~line 1206) appears dropped by extraction.
1207 noProfiler.link(this);
// op_profile_did_call: mirror of profile_will_call, fired after the call.
1211 void JIT::emit_op_profile_did_call(Instruction* currentInstruction)
1213 peek(regT1, OBJECT_OFFSETOF(JITStackFrame, enabledProfilerReference) / sizeof(void*));
1214 Jump noProfiler = branchTestPtr(Zero, Address(regT1));
1216 JITStubCall stubCall(this, cti_op_profile_did_call);
1217 stubCall.addArgument(currentInstruction[1].u.operand, regT1);
// NOTE(review): stubCall.call() (original ~line 1218) appears dropped by extraction.
1219 noProfiler.link(this);
// Slow path for op_convert_this: undefined converts directly to the cached
// global 'this' object without a stub call; everything else goes through
// cti_op_convert_this.
1225 void JIT::emitSlow_op_convert_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1227 void* globalThis = m_codeBlock->globalObject()->globalScopeChain()->globalThis.get();
// NOTE(review): the linkSlowCase(iter) calls matching the fast path's slow
// cases (original ~lines 1228-1229) appear dropped by extraction.
1230 Jump isNotUndefined = branchPtr(NotEqual, regT0, TrustedImmPtr(JSValue::encode(jsUndefined())));
1231 move(TrustedImmPtr(globalThis), regT0);
1232 emitPutVirtualRegister(currentInstruction[1].u.operand, regT0);
1233 emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_convert_this));
1235 isNotUndefined.link(this);
1237 JITStubCall stubCall(this, cti_op_convert_this);
1238 stubCall.addArgument(regT0);
1239 stubCall.call(currentInstruction[1].u.operand);
// Slow path for op_to_primitive: delegate to the runtime stub.
1242 void JIT::emitSlow_op_to_primitive(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
// NOTE(review): the linkSlowCase(iter) call (original ~line 1244) appears
// dropped by extraction.
1246 JITStubCall stubCall(this, cti_op_to_primitive);
1247 stubCall.addArgument(regT0);
1248 stubCall.call(currentInstruction[1].u.operand);
// Slow path for op_not: undo the fast path's xor of ValueFalse (restoring the
// original operand bits) before handing the value to the stub.
1251 void JIT::emitSlow_op_not(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1254 xorPtr(TrustedImm32(static_cast<int32_t>(ValueFalse)), regT0);
1255 JITStubCall stubCall(this, cti_op_not);
1256 stubCall.addArgument(regT0);
1257 stubCall.call(currentInstruction[1].u.operand);
// Slow path for op_jfalse: evaluate truthiness via cti_op_jtrue and branch on
// the INVERTED result (jump when the value is falsy).
1260 void JIT::emitSlow_op_jfalse(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1263 JITStubCall stubCall(this, cti_op_jtrue);
1264 stubCall.addArgument(regT0);
// NOTE(review): stubCall.call() (original ~line 1265) appears dropped by extraction.
1266 emitJumpSlowToHot(branchTest32(Zero, regT0), currentInstruction[2].u.operand); // inverted!
// Slow path for op_bitnot: delegate to the runtime stub.
1269 void JIT::emitSlow_op_bitnot(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1272 JITStubCall stubCall(this, cti_op_bitnot);
1273 stubCall.addArgument(regT0);
1274 stubCall.call(currentInstruction[1].u.operand);
// Slow path for op_jtrue: evaluate truthiness via the stub and branch on a
// non-zero result.
1277 void JIT::emitSlow_op_jtrue(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1280 JITStubCall stubCall(this, cti_op_jtrue);
1281 stubCall.addArgument(regT0);
// NOTE(review): stubCall.call() (original ~line 1282) appears dropped by extraction.
1283 emitJumpSlowToHot(branchTest32(NonZero, regT0), currentInstruction[2].u.operand);
// Slow path for op_bitxor: both operands to the stub.
1286 void JIT::emitSlow_op_bitxor(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1289 JITStubCall stubCall(this, cti_op_bitxor);
1290 stubCall.addArgument(regT0);
1291 stubCall.addArgument(regT1);
1292 stubCall.call(currentInstruction[1].u.operand);
// Slow path for op_bitor: both operands to the stub.
1295 void JIT::emitSlow_op_bitor(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1298 JITStubCall stubCall(this, cti_op_bitor);
1299 stubCall.addArgument(regT0);
1300 stubCall.addArgument(regT1);
1301 stubCall.call(currentInstruction[1].u.operand);
// Slow path for op_eq: stub returns 0/1 in regT0; tag it as a boolean.
1304 void JIT::emitSlow_op_eq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1307 JITStubCall stubCall(this, cti_op_eq);
1308 stubCall.addArgument(regT0);
1309 stubCall.addArgument(regT1);
// NOTE(review): stubCall.call() (original ~line 1310) appears dropped by extraction.
1311 emitTagAsBoolImmediate(regT0);
1312 emitPutVirtualRegister(currentInstruction[1].u.operand);
// Slow path for op_neq: reuse cti_op_eq and invert the low bit of the result
// before tagging it as a boolean.
1315 void JIT::emitSlow_op_neq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1318 JITStubCall stubCall(this, cti_op_eq);
1319 stubCall.addArgument(regT0);
1320 stubCall.addArgument(regT1);
// NOTE(review): stubCall.call() (original ~line 1321) appears dropped by extraction.
1322 xor32(TrustedImm32(0x1), regT0);
1323 emitTagAsBoolImmediate(regT0);
1324 emitPutVirtualRegister(currentInstruction[1].u.operand);
// Slow path for op_stricteq: delegate to the runtime stub.
1327 void JIT::emitSlow_op_stricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1331 JITStubCall stubCall(this, cti_op_stricteq);
1332 stubCall.addArgument(regT0);
1333 stubCall.addArgument(regT1);
1334 stubCall.call(currentInstruction[1].u.operand);
// Slow path for op_nstricteq: delegate to the runtime stub.
1337 void JIT::emitSlow_op_nstricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1341 JITStubCall stubCall(this, cti_op_nstricteq);
1342 stubCall.addArgument(regT0);
1343 stubCall.addArgument(regT1);
1344 stubCall.call(currentInstruction[1].u.operand);
// Slow path for op_check_has_instance: baseVal was not a cell (or failed the
// fast checks); let the stub throw the appropriate TypeError if needed.
1347 void JIT::emitSlow_op_check_has_instance(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1349 unsigned baseVal = currentInstruction[1].u.operand;
1351 linkSlowCaseIfNotJSCell(iter, baseVal);
1353 JITStubCall stubCall(this, cti_op_check_has_instance);
1354 stubCall.addArgument(baseVal, regT2);
// NOTE(review): stubCall.call() (original ~line 1355) appears dropped by extraction.
// Slow path for op_instanceof: link the cell-check slow cases for value and
// proto, then compute the result via the runtime stub.
1358 void JIT::emitSlow_op_instanceof(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1360 unsigned dst = currentInstruction[1].u.operand;
1361 unsigned value = currentInstruction[2].u.operand;
1362 unsigned baseVal = currentInstruction[3].u.operand;
1363 unsigned proto = currentInstruction[4].u.operand;
1365 linkSlowCaseIfNotJSCell(iter, value);
1366 linkSlowCaseIfNotJSCell(iter, proto);
// NOTE(review): an additional linkSlowCase(iter) (original ~line 1367) appears
// dropped by extraction.
1369 JITStubCall stubCall(this, cti_op_instanceof);
1370 stubCall.addArgument(value, regT2);
1371 stubCall.addArgument(baseVal, regT2);
1372 stubCall.addArgument(proto, regT2);
// NOTE(review): stubCall.call(dst) (original ~line 1373) appears dropped by
// extraction — 'dst' is otherwise unused here.
// Slow path for op_call: shared call slow-case emitter; consumes the next
// call-link-info slot.
1376 void JIT::emitSlow_op_call(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1378 compileOpCallSlowCase(currentInstruction, iter, m_callLinkInfoIndex++, op_call);
// Slow path for op_call_eval: same shared emitter, tagged as op_call_eval.
1381 void JIT::emitSlow_op_call_eval(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1383 compileOpCallSlowCase(currentInstruction, iter, m_callLinkInfoIndex++, op_call_eval);
// Slow path for op_call_varargs: varargs calls have their own slow-case
// emitter (no call-link-info slot).
1386 void JIT::emitSlow_op_call_varargs(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1388 compileOpCallVarargsSlowCase(currentInstruction, iter);
// Slow path for op_construct: shared emitter, tagged as op_construct.
1391 void JIT::emitSlow_op_construct(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1393 compileOpCallSlowCase(currentInstruction, iter, m_callLinkInfoIndex++, op_construct);
// Slow path for op_to_jsnumber: link both fast-path slow cases and call the stub.
1396 void JIT::emitSlow_op_to_jsnumber(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1398 linkSlowCaseIfNotJSCell(iter, currentInstruction[2].u.operand);
// NOTE(review): the linkSlowCase(iter) for the NumberType check (original
// ~line 1399) appears dropped by extraction.
1401 JITStubCall stubCall(this, cti_op_to_jsnumber);
1402 stubCall.addArgument(regT0);
1403 stubCall.call(currentInstruction[1].u.operand);
// op_get_arguments_length fast path: valid only while no arguments object has
// been materialized (register still null); reads ArgumentCount from the frame
// header and subtracts 1 for 'this'.
1406 void JIT::emit_op_get_arguments_length(Instruction* currentInstruction)
1408 int dst = currentInstruction[1].u.operand;
1409 int argumentsRegister = currentInstruction[2].u.operand;
1410 addSlowCase(branchTestPtr(NonZero, addressFor(argumentsRegister)));
1411 emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT0);
1412 sub32(TrustedImm32(1), regT0);
1413 emitFastArithReTagImmediate(regT0, regT0);
1414 emitPutVirtualRegister(dst, regT0);
// Slow path for op_get_arguments_length: the arguments object exists, so do a
// generic get_by_id of "length" on it.
1417 void JIT::emitSlow_op_get_arguments_length(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
// NOTE(review): the linkSlowCase(iter) call (original ~line 1419) appears
// dropped by extraction.
1420 unsigned dst = currentInstruction[1].u.operand;
1421 unsigned base = currentInstruction[2].u.operand;
1422 Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand));
1424 emitGetVirtualRegister(base, regT0);
1425 JITStubCall stubCall(this, cti_op_get_by_id_generic);
1426 stubCall.addArgument(regT0);
1427 stubCall.addArgument(TrustedImmPtr(ident));
// NOTE(review): stubCall.call(dst) (original ~line 1428) appears dropped by
// extraction — 'dst' is otherwise unused here.
// op_get_argument_by_val fast path: read arguments[property] directly from the
// register file, valid only while no arguments object exists. Slow cases:
// arguments object already created, non-int index, or index out of range.
1431 void JIT::emit_op_get_argument_by_val(Instruction* currentInstruction)
1433 int dst = currentInstruction[1].u.operand;
1434 int argumentsRegister = currentInstruction[2].u.operand;
1435 int property = currentInstruction[3].u.operand;
1436 addSlowCase(branchTestPtr(NonZero, addressFor(argumentsRegister)));
1437 emitGetVirtualRegister(property, regT1);
1438 addSlowCase(emitJumpIfNotImmediateInteger(regT1));
1439 add32(TrustedImm32(1), regT1);
1440 // regT1 now contains the integer index of the argument we want, including this
1441 emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT2);
1442 addSlowCase(branch32(AboveOrEqual, regT1, regT2));
1444 Jump skipOutofLineParams;
1445 int numArgs = m_codeBlock->m_numParameters;
// In-place case: the argument is one of the declared parameters, stored just
// below the call frame header.
1447 Jump notInInPlaceArgs = branch32(AboveOrEqual, regT1, Imm32(numArgs));
1448 addPtr(Imm32(static_cast<unsigned>(-(RegisterFile::CallFrameHeaderSize + numArgs) * sizeof(Register))), callFrameRegister, regT0);
1449 loadPtr(BaseIndex(regT0, regT1, TimesEight, 0), regT0);
1450 skipOutofLineParams = jump();
1451 notInInPlaceArgs.link(this);
// Out-of-line case: extra (unnamed) arguments live a further ArgumentCount
// registers below; adjust the base by regT2 * sizeof(Register).
1454 addPtr(Imm32(static_cast<unsigned>(-(RegisterFile::CallFrameHeaderSize + numArgs) * sizeof(Register))), callFrameRegister, regT0);
1455 mul32(TrustedImm32(sizeof(Register)), regT2, regT2);
1456 subPtr(regT2, regT0);
1457 loadPtr(BaseIndex(regT0, regT1, TimesEight, 0), regT0);
1459 skipOutofLineParams.link(this);
1460 emitPutVirtualRegister(dst, regT0);
// Slow path for op_get_argument_by_val: materialize the arguments object if it
// does not exist yet, then perform an ordinary get_by_val on it.
1463 void JIT::emitSlow_op_get_argument_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1465 unsigned dst = currentInstruction[1].u.operand;
1466 unsigned arguments = currentInstruction[2].u.operand;
1467 unsigned property = currentInstruction[3].u.operand;
// NOTE(review): the linkSlowCase(iter) for the "already created" case
// (original ~line 1469) appears dropped by extraction.
1470 Jump skipArgumentsCreation = jump();
// NOTE(review): the linkSlowCase(iter) calls for the remaining fast-path slow
// cases (original ~lines 1471-1472) appear dropped by extraction.
1474 if (m_codeBlock->m_numParameters == 1)
1475 JITStubCall(this, cti_op_create_arguments_no_params).call();
// NOTE(review): the 'else' keyword (original line 1476) appears dropped by extraction.
1477 JITStubCall(this, cti_op_create_arguments).call();
1478 emitPutVirtualRegister(arguments);
1479 emitPutVirtualRegister(unmodifiedArgumentsRegister(arguments));
1481 skipArgumentsCreation.link(this);
1482 JITStubCall stubCall(this, cti_op_get_by_val);
1483 stubCall.addArgument(arguments, regT2);
1484 stubCall.addArgument(property, regT2);
// NOTE(review): stubCall.call(dst) (original ~line 1485) appears dropped by
// extraction — 'dst' is otherwise unused here.
1488 #endif // USE(JSVALUE64)
// op_resolve_global_dynamic: walk 'skip' scope-chain nodes, proving each is an
// activation (no dynamically-added properties shadow the global); any
// non-activation node bails to the slow case. Then resolve as a plain global.
1490 void JIT::emit_op_resolve_global_dynamic(Instruction* currentInstruction)
1492 int skip = currentInstruction[5].u.operand;
1494 emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT0);
1496 bool checkTopLevel = m_codeBlock->codeType() == FunctionCode && m_codeBlock->needsFullScopeChain();
1497 ASSERT(skip || !checkTopLevel);
// Top-level activation may not exist yet (lazily created); if its register is
// still null, skip the check for that node.
1498 if (checkTopLevel && skip--) {
1499 Jump activationNotCreated;
// NOTE(review): an 'if (checkTopLevel)' guard (original ~line 1500) appears
// dropped by extraction.
1501 activationNotCreated = branchTestPtr(Zero, addressFor(m_codeBlock->activationRegister()));
1502 loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, object)), regT1);
1503 addSlowCase(checkStructure(regT1, m_globalData->activationStructure.get()));
1504 loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, next)), regT0);
1505 activationNotCreated.link(this);
// NOTE(review): the 'while (skip--) {' loop header (original ~line 1507)
// appears dropped by extraction; the next three lines are its body.
1508 loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, object)), regT1);
1509 addSlowCase(checkStructure(regT1, m_globalData->activationStructure.get()));
1510 loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, next)), regT0);
// Scope chain proven clean: fall through to the normal global resolve.
1512 emit_op_resolve_global(currentInstruction, true);
// Slow path for op_resolve_global_dynamic. Two distinct failures: a scope node
// was not an activation (full cti_op_resolve), or the nodes were fine but the
// global cache missed (cti_op_resolve_global).
1515 void JIT::emitSlow_op_resolve_global_dynamic(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1517 unsigned dst = currentInstruction[1].u.operand;
1518 Identifier* ident = &m_codeBlock->identifier(currentInstruction[2].u.operand);
1519 int skip = currentInstruction[5].u.operand;
// NOTE(review): the 'while (skip--) { linkSlowCase(iter); ... }' sequence that
// links one slow case per skipped scope node (original ~lines 1520-1521)
// appears dropped by extraction; 'skip' is otherwise unused here.
1522 JITStubCall resolveStubCall(this, cti_op_resolve);
1523 resolveStubCall.addArgument(TrustedImmPtr(ident));
1524 resolveStubCall.call(dst);
1525 emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_resolve_global_dynamic));
1527 unsigned currentIndex = m_globalResolveInfoIndex++;
1529 linkSlowCase(iter); // We managed to skip all the nodes in the scope chain, but the cache missed.
1530 JITStubCall stubCall(this, cti_op_resolve_global);
1531 stubCall.addArgument(TrustedImmPtr(ident));
1532 stubCall.addArgument(Imm32(currentIndex));
1533 stubCall.addArgument(regT0);
// NOTE(review): stubCall.call(dst) (original ~line 1534) appears dropped by
// extraction.
// op_new_regexp: allocate a RegExp object from the code block's regexp
// constant via the runtime stub.
1537 void JIT::emit_op_new_regexp(Instruction* currentInstruction)
1539 JITStubCall stubCall(this, cti_op_new_regexp);
1540 stubCall.addArgument(TrustedImmPtr(m_codeBlock->regexp(currentInstruction[2].u.operand)));
1541 stubCall.call(currentInstruction[1].u.operand);
// op_load_varargs: copy the caller's arguments into a contiguous region for a
// f.apply(x, arguments)-style call. Inline copy is attempted only when the
// function declares no parameters (expectedParams == 0), so the arguments are
// guaranteed to be one contiguous stream; otherwise the stub does everything.
1544 void JIT::emit_op_load_varargs(Instruction* currentInstruction)
1546 int argCountDst = currentInstruction[1].u.operand;
1547 int argsOffset = currentInstruction[2].u.operand;
1548 int registerOffset = currentInstruction[3].u.operand;
1549 ASSERT(argsOffset <= registerOffset);
1551 int expectedParams = m_codeBlock->m_numParameters - 1;
1552 // Don't do inline copying if we aren't guaranteed to have a single stream
// (of arguments to copy — declared parameters may have been reordered/boxed).
1554 if (expectedParams) {
1555 JITStubCall stubCall(this, cti_op_load_varargs);
1556 stubCall.addArgument(Imm32(argsOffset));
// NOTE(review): stubCall.call() (original ~line 1557) appears dropped by extraction.
1558 // Stores a naked int32 in the register file.
1559 store32(returnValueRegister, Address(callFrameRegister, argCountDst * sizeof(Register)));
// NOTE(review): the 'return;' and closing brace ending the stub-only path
// (original ~lines 1560-1561) appear dropped by extraction.
// Inline path: bail to the slow case if an arguments object already exists.
1563 #if USE(JSVALUE32_64)
1564 addSlowCase(branch32(NotEqual, tagFor(argsOffset), TrustedImm32(JSValue::EmptyValueTag)));
// NOTE(review): the '#else' (original ~line 1565) appears dropped by extraction.
1566 addSlowCase(branchTestPtr(NonZero, addressFor(argsOffset)));
// NOTE(review): the '#endif' (original ~line 1567) appears dropped by extraction.
1568 // Load arg count into regT0
1569 emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT0);
1570 store32(TrustedImm32(Int32Tag), intTagFor(argCountDst));
1571 store32(regT0, intPayloadFor(argCountDst))
1572 Jump endBranch = branch32(Equal, regT0, TrustedImm32(1));
// Compute source (out-of-line args below the frame header) and destination.
1574 mul32(TrustedImm32(sizeof(Register)), regT0, regT3);
1575 addPtr(TrustedImm32(static_cast<unsigned>(sizeof(Register) - RegisterFile::CallFrameHeaderSize * sizeof(Register))), callFrameRegister, regT1);
1576 subPtr(regT3, regT1); // regT1 is now the start of the out of line arguments
1577 addPtr(Imm32(argsOffset * sizeof(Register)), callFrameRegister, regT2); // regT2 is the target buffer
1579 // Bounds check the registerfile
1580 addPtr(regT2, regT3);
1581 addPtr(Imm32((registerOffset - argsOffset) * sizeof(Register)), regT3);
1582 addSlowCase(branchPtr(Below, AbsoluteAddress(m_globalData->interpreter->registerFile().addressOfEnd()), regT3));
// Copy loop, one Register (8 bytes) per iteration, counting regT0 down.
1584 sub32(TrustedImm32(1), regT0);
1585 Label loopStart = label();
1586 loadPtr(BaseIndex(regT1, regT0, TimesEight, static_cast<unsigned>(0 - 2 * sizeof(Register))), regT3);
1587 storePtr(regT3, BaseIndex(regT2, regT0, TimesEight, static_cast<unsigned>(0 - sizeof(Register))));
1588 #if USE(JSVALUE32_64)
// 32-bit: each Register is two machine words; copy the second word too.
1589 loadPtr(BaseIndex(regT1, regT0, TimesEight, static_cast<unsigned>(sizeof(void*) - 2 * sizeof(Register))), regT3);
1590 storePtr(regT3, BaseIndex(regT2, regT0, TimesEight, static_cast<unsigned>(sizeof(void*) - sizeof(Register))));
// NOTE(review): the '#endif' (original ~line 1591) appears dropped by extraction.
1592 branchSubPtr(NonZero, TrustedImm32(1), regT0).linkTo(loopStart, this);
1593 endBranch.link(this);
// Slow path for op_load_varargs (inline-copy path only — the expectedParams
// case never emits slow cases): fall back to the stub and store the returned
// argument count.
1596 void JIT::emitSlow_op_load_varargs(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1598 int argCountDst = currentInstruction[1].u.operand;
1599 int argsOffset = currentInstruction[2].u.operand;
1600 int expectedParams = m_codeBlock->m_numParameters - 1;
// NOTE(review): the early-return for expectedParams and the linkSlowCase(iter)
// calls (original ~lines 1601-1605) appear dropped by extraction.
1606 JITStubCall stubCall(this, cti_op_load_varargs);
1607 stubCall.addArgument(Imm32(argsOffset));
// NOTE(review): stubCall.call() (original ~line 1608) appears dropped by extraction.
1610 store32(TrustedImm32(Int32Tag), intTagFor(argCountDst));
1611 store32(returnValueRegister, intPayloadFor(argCountDst));
// op_new_func: inline-allocate a JSFunction for a function declaration.
// Operand 3 non-zero means the declaration is lazily hoisted: skip allocation
// if the dst register is already populated.
1614 void JIT::emit_op_new_func(Instruction* currentInstruction)
// NOTE(review): the 'Jump lazyJump;' declaration (original ~line 1616) appears
// dropped by extraction — it is linked at the bottom of this function.
1617 int dst = currentInstruction[1].u.operand;
1618 if (currentInstruction[3].u.operand) {
1619 #if USE(JSVALUE32_64)
1620 lazyJump = branch32(NotEqual, tagFor(dst), TrustedImm32(JSValue::EmptyValueTag));
// NOTE(review): '#else' (original ~line 1621) appears dropped by extraction.
1622 lazyJump = branchTestPtr(NonZero, addressFor(dst));
// NOTE(review): '#endif' and closing brace (original ~lines 1623-1624) appear
// dropped by extraction.
1626 FunctionExecutable* executable = m_codeBlock->functionDecl(currentInstruction[2].u.operand);
1627 emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT2);
1628 emitAllocateJSFunction(executable, regT2, regT0, regT1);
1630 emitStoreCell(dst, regT0);
1632 if (currentInstruction[3].u.operand) {
1633 #if USE(JSVALUE32_64)
// NOTE(review): the JSVALUE32_64 unmap/#else lines (original ~lines 1634-1635)
// appear dropped by extraction.
1636 killLastResultRegister();
// NOTE(review): '#endif' (original ~line 1637) appears dropped by extraction.
1638 lazyJump.link(this);
// Slow path for op_new_func: inline allocation failed; allocate via the stub.
1642 void JIT::emitSlow_op_new_func(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
// NOTE(review): the linkSlowCase(iter) call (original ~line 1644) appears
// dropped by extraction.
1645 JITStubCall stubCall(this, cti_op_new_func);
1646 stubCall.addArgument(TrustedImmPtr(m_codeBlock->functionDecl(currentInstruction[2].u.operand)));
1647 stubCall.call(currentInstruction[1].u.operand);
// op_new_func_exp: function expression. Anonymous expressions are allocated
// inline; named ones always go through the stub (see comment below).
1650 void JIT::emit_op_new_func_exp(Instruction* currentInstruction)
1652 FunctionExecutable* executable = m_codeBlock->functionExpr(currentInstruction[2].u.operand);
1654 // We only inline the allocation of anonymous function expressions.
1655 // If we want to be able to allocate a named function expression, we would
1656 // need to be able to do inline allocation of a JSStaticScopeObject.
1657 if (executable->name().isNull()) {
1658 emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT2);
1659 emitAllocateJSFunction(executable, regT2, regT0, regT1);
1660 emitStoreCell(currentInstruction[1].u.operand, regT0);
// NOTE(review): a 'return;' and closing brace (original ~lines 1661-1662)
// appear dropped by extraction.
1664 JITStubCall stubCall(this, cti_op_new_func_exp);
1665 stubCall.addArgument(TrustedImmPtr(m_codeBlock->functionExpr(currentInstruction[2].u.operand)));
1666 stubCall.call(currentInstruction[1].u.operand);
// Slow path for op_new_func_exp: only anonymous expressions emit a slow case
// (named ones were fully handled by the stub in the fast path), hence the
// early return for named executables.
1669 void JIT::emitSlow_op_new_func_exp(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1671 FunctionExecutable* executable = m_codeBlock->functionExpr(currentInstruction[2].u.operand);
1672 if (!executable->name().isNull())
// NOTE(review): the 'return;' body of this if, and the linkSlowCase(iter) call
// (original ~lines 1673-1674), appear dropped by extraction.
1675 JITStubCall stubCall(this, cti_op_new_func_exp);
1676 stubCall.addArgument(TrustedImmPtr(executable));
1677 stubCall.call(currentInstruction[1].u.operand);
// op_new_array: allocate an array from 'count' values starting at a register
// offset; stub-only.
1680 void JIT::emit_op_new_array(Instruction* currentInstruction)
1682 JITStubCall stubCall(this, cti_op_new_array);
1683 stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
1684 stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
1685 stubCall.call(currentInstruction[1].u.operand);
// op_new_array_buffer: allocate an array from a constant-buffer index and
// length; stub-only.
1688 void JIT::emit_op_new_array_buffer(Instruction* currentInstruction)
1690 JITStubCall stubCall(this, cti_op_new_array_buffer);
1691 stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
1692 stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
1693 stubCall.call(currentInstruction[1].u.operand);
1698 #endif // ENABLE(JIT)