2 * Copyright (C) 2009 Apple Inc. All rights reserved.
3 * Copyright (C) 2010 Patrick Gansterer <paroga@paroga.com>
5 * Redistribution and use in source and binary forms, with or without
6 * modification, are permitted provided that the following conditions
8 * 1. Redistributions of source code must retain the above copyright
9 * notice, this list of conditions and the following disclaimer.
10 * 2. Redistributions in binary form must reproduce the above copyright
11 * notice, this list of conditions and the following disclaimer in the
12 * documentation and/or other materials provided with the distribution.
14 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
15 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
16 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
17 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
18 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
19 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
20 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
21 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
22 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
23 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
24 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
33 #include "JITInlineMethods.h"
34 #include "JITStubCall.h"
37 #include "JSFunction.h"
38 #include "JSPropertyNameIterator.h"
39 #include "LinkBuffer.h"
43 PassRefPtr<ExecutableMemoryHandle> JIT::privateCompileCTIMachineTrampolines(JSGlobalData* globalData, TrampolineStructure *trampolines)
45 #if ENABLE(JIT_USE_SOFT_MODULO)
46 Label softModBegin = align();
49 // (1) This function provides fast property access for string length
50 Label stringLengthBegin = align();
52 // regT0 holds payload, regT1 holds tag
54 Jump string_failureCases1 = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));
55 Jump string_failureCases2 = branchPtr(NotEqual, Address(regT0), TrustedImmPtr(m_globalData->jsStringVPtr));
57 // Checks out okay! - get the length from the Ustring.
58 load32(Address(regT0, OBJECT_OFFSETOF(JSString, m_length)), regT2);
60 Jump string_failureCases3 = branch32(Above, regT2, TrustedImm32(INT_MAX));
62 move(TrustedImm32(JSValue::Int32Tag), regT1);
66 JumpList callLinkFailures;
67 // (2) Trampolines for the slow cases of op_call / op_call_eval / op_construct.
68 // VirtualCallLink Trampoline
69 // regT0 holds callee, regT1 holds argCount. regT2 will hold the FunctionExecutable.
70 Label virtualCallLinkBegin = align();
71 compileOpCallInitializeCallFrame();
72 preserveReturnAddressAfterCall(regT3);
73 emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);
74 restoreArgumentReference();
75 Call callLazyLinkCall = call();
76 callLinkFailures.append(branchTestPtr(Zero, regT0));
77 restoreReturnAddressBeforeReturn(regT3);
78 emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT1);
81 // VirtualConstructLink Trampoline
82 // regT0 holds callee, regT1 holds argCount. regT2 will hold the FunctionExecutable.
83 Label virtualConstructLinkBegin = align();
84 compileOpCallInitializeCallFrame();
85 preserveReturnAddressAfterCall(regT3);
86 emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);
87 restoreArgumentReference();
88 Call callLazyLinkConstruct = call();
89 restoreReturnAddressBeforeReturn(regT3);
90 callLinkFailures.append(branchTestPtr(Zero, regT0));
91 emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT1);
94 // VirtualCall Trampoline
95 // regT0 holds callee, regT1 holds argCount. regT2 will hold the FunctionExecutable.
96 Label virtualCallBegin = align();
97 compileOpCallInitializeCallFrame();
99 loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
101 Jump hasCodeBlock3 = branch32(GreaterThanOrEqual, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParametersForCall)), TrustedImm32(0));
102 preserveReturnAddressAfterCall(regT3);
103 restoreArgumentReference();
104 Call callCompileCall = call();
105 callLinkFailures.append(branchTestPtr(Zero, regT0));
106 emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT1);
107 restoreReturnAddressBeforeReturn(regT3);
108 loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
109 hasCodeBlock3.link(this);
111 loadPtr(Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_jitCodeForCallWithArityCheck)), regT0);
114 // VirtualConstruct Trampoline
115 // regT0 holds callee, regT1 holds argCount. regT2 will hold the FunctionExecutable.
116 Label virtualConstructBegin = align();
117 compileOpCallInitializeCallFrame();
119 loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
121 Jump hasCodeBlock4 = branch32(GreaterThanOrEqual, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParametersForConstruct)), TrustedImm32(0));
122 preserveReturnAddressAfterCall(regT3);
123 restoreArgumentReference();
124 Call callCompileCconstruct = call();
125 callLinkFailures.append(branchTestPtr(Zero, regT0));
126 emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT1);
127 restoreReturnAddressBeforeReturn(regT3);
128 loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
129 hasCodeBlock4.link(this);
131 loadPtr(Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_jitCodeForConstructWithArityCheck)), regT0);
134 // If the parser fails we want to be able to be able to keep going,
135 // So we handle this as a parse failure.
136 callLinkFailures.link(this);
137 emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);
138 emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);
139 restoreReturnAddressBeforeReturn(regT1);
140 move(TrustedImmPtr(&globalData->exceptionLocation), regT2);
141 storePtr(regT1, regT2);
142 poke(callFrameRegister, 1 + OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof(void*));
143 poke(TrustedImmPtr(FunctionPtr(ctiVMThrowTrampoline).value()));
146 // NativeCall Trampoline
147 Label nativeCallThunk = privateCompileCTINativeCall(globalData);
148 Label nativeConstructThunk = privateCompileCTINativeCall(globalData, true);
150 Call string_failureCases1Call = makeTailRecursiveCall(string_failureCases1);
151 Call string_failureCases2Call = makeTailRecursiveCall(string_failureCases2);
152 Call string_failureCases3Call = makeTailRecursiveCall(string_failureCases3);
154 // All trampolines constructed! copy the code, link up calls, and set the pointers on the Machine object.
155 LinkBuffer patchBuffer(*m_globalData, this);
157 patchBuffer.link(string_failureCases1Call, FunctionPtr(cti_op_get_by_id_string_fail));
158 patchBuffer.link(string_failureCases2Call, FunctionPtr(cti_op_get_by_id_string_fail));
159 patchBuffer.link(string_failureCases3Call, FunctionPtr(cti_op_get_by_id_string_fail));
160 patchBuffer.link(callLazyLinkCall, FunctionPtr(cti_vm_lazyLinkCall));
161 patchBuffer.link(callLazyLinkConstruct, FunctionPtr(cti_vm_lazyLinkConstruct));
162 patchBuffer.link(callCompileCall, FunctionPtr(cti_op_call_jitCompile));
163 patchBuffer.link(callCompileCconstruct, FunctionPtr(cti_op_construct_jitCompile));
165 CodeRef finalCode = patchBuffer.finalizeCode();
166 RefPtr<ExecutableMemoryHandle> executableMemory = finalCode.executableMemory();
168 trampolines->ctiVirtualCall = patchBuffer.trampolineAt(virtualCallBegin);
169 trampolines->ctiVirtualConstruct = patchBuffer.trampolineAt(virtualConstructBegin);
170 trampolines->ctiNativeCall = patchBuffer.trampolineAt(nativeCallThunk);
171 trampolines->ctiNativeConstruct = patchBuffer.trampolineAt(nativeConstructThunk);
172 trampolines->ctiStringLengthTrampoline = patchBuffer.trampolineAt(stringLengthBegin);
173 trampolines->ctiVirtualCallLink = patchBuffer.trampolineAt(virtualCallLinkBegin);
174 trampolines->ctiVirtualConstructLink = patchBuffer.trampolineAt(virtualConstructLinkBegin);
175 #if ENABLE(JIT_USE_SOFT_MODULO)
176 trampolines->ctiSoftModulo = patchBuffer.trampolineAt(softModBegin);
179 return executableMemory.release();
182 JIT::Label JIT::privateCompileCTINativeCall(JSGlobalData* globalData, bool isConstruct)
184 int executableOffsetToFunction = isConstruct ? OBJECT_OFFSETOF(NativeExecutable, m_constructor) : OBJECT_OFFSETOF(NativeExecutable, m_function);
186 Label nativeCallThunk = align();
188 emitPutImmediateToCallFrameHeader(0, RegisterFile::CodeBlock);
191 // Load caller frame's scope chain into this callframe so that whatever we call can
192 // get to its global data.
193 emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT0);
194 emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT0);
195 emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);
198 emitPutToCallFrameHeader(regT1, RegisterFile::ReturnPC);
200 // Calling convention: f(ecx, edx, ...);
201 // Host function signature: f(ExecState*);
202 move(callFrameRegister, X86Registers::ecx);
204 subPtr(TrustedImm32(16 - sizeof(void*)), stackPointerRegister); // Align stack after call.
207 emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, regT1);
208 loadPtr(Address(regT1, OBJECT_OFFSETOF(JSFunction, m_executable)), regT1);
209 move(regT0, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
210 call(Address(regT1, executableOffsetToFunction));
212 addPtr(TrustedImm32(16 - sizeof(void*)), stackPointerRegister);
215 // Load caller frame's scope chain into this callframe so that whatever we call can
216 // get to its global data.
217 emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT2);
218 emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT2);
219 emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);
221 preserveReturnAddressAfterCall(regT3); // Callee preserved
222 emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);
224 // Calling convention: f(r0 == regT0, r1 == regT1, ...);
225 // Host function signature: f(ExecState*);
226 move(callFrameRegister, ARMRegisters::r0);
229 emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, ARMRegisters::r1);
230 move(regT2, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
231 loadPtr(Address(ARMRegisters::r1, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
232 call(Address(regT2, executableOffsetToFunction));
234 restoreReturnAddressBeforeReturn(regT3);
236 // Load caller frame's scope chain into this callframe so that whatever we call can
237 // get to its global data.
238 emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT2);
239 emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT2);
240 emitPutToCallFrameHeader(regT1, RegisterFile::ScopeChain);
242 preserveReturnAddressAfterCall(regT3); // Callee preserved
243 emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);
245 // Calling convention: f(r0 == regT4, r1 == regT5, ...);
246 // Host function signature: f(ExecState*);
247 move(callFrameRegister, regT4);
249 emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, regT5);
250 move(regT2, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
251 loadPtr(Address(regT5, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
253 call(Address(regT2, executableOffsetToFunction), regT0);
254 restoreReturnAddressBeforeReturn(regT3);
256 // Load caller frame's scope chain into this callframe so that whatever we call can
257 // get to its global data.
258 emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT0);
259 emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT0);
260 emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);
262 preserveReturnAddressAfterCall(regT3); // Callee preserved
263 emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);
265 // Calling convention: f(a0, a1, a2, a3);
266 // Host function signature: f(ExecState*);
268 // Allocate stack space for 16 bytes (8-byte aligned)
269 // 16 bytes (unused) for 4 arguments
270 subPtr(TrustedImm32(16), stackPointerRegister);
273 move(callFrameRegister, MIPSRegisters::a0);
276 emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, MIPSRegisters::a2);
277 loadPtr(Address(MIPSRegisters::a2, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
278 move(regT0, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
279 call(Address(regT2, executableOffsetToFunction));
281 // Restore stack space
282 addPtr(TrustedImm32(16), stackPointerRegister);
284 restoreReturnAddressBeforeReturn(regT3);
287 #error "JIT not supported on this platform."
288 UNUSED_PARAM(executableOffsetToFunction);
292 // Check for an exception
293 Jump sawException = branch32(NotEqual, AbsoluteAddress(reinterpret_cast<char*>(&globalData->exception) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), TrustedImm32(JSValue::EmptyValueTag));
298 // Handle an exception
299 sawException.link(this);
301 // Grab the return address.
302 preserveReturnAddressAfterCall(regT1);
304 move(TrustedImmPtr(&globalData->exceptionLocation), regT2);
305 storePtr(regT1, regT2);
306 poke(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof(void*));
308 // Set the return address.
309 move(TrustedImmPtr(FunctionPtr(ctiVMThrowTrampoline).value()), regT1);
310 restoreReturnAddressBeforeReturn(regT1);
314 return nativeCallThunk;
317 JIT::CodeRef JIT::privateCompileCTINativeCall(JSGlobalData* globalData, NativeFunction func)
321 emitPutImmediateToCallFrameHeader(0, RegisterFile::CodeBlock);
324 // Load caller frame's scope chain into this callframe so that whatever we call can
325 // get to its global data.
326 emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT0);
327 emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT0);
328 emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);
331 emitPutToCallFrameHeader(regT1, RegisterFile::ReturnPC);
333 // Calling convention: f(ecx, edx, ...);
334 // Host function signature: f(ExecState*);
335 move(callFrameRegister, X86Registers::ecx);
337 subPtr(TrustedImm32(16 - sizeof(void*)), stackPointerRegister); // Align stack after call.
339 move(regT0, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
344 addPtr(TrustedImm32(16 - sizeof(void*)), stackPointerRegister);
347 // Load caller frame's scope chain into this callframe so that whatever we call can
348 // get to its global data.
349 emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT2);
350 emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT2);
351 emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);
353 preserveReturnAddressAfterCall(regT3); // Callee preserved
354 emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);
356 // Calling convention: f(r0 == regT0, r1 == regT1, ...);
357 // Host function signature: f(ExecState*);
358 move(callFrameRegister, ARMRegisters::r0);
360 emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, ARMRegisters::r1);
361 move(regT2, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
362 loadPtr(Address(ARMRegisters::r1, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
367 restoreReturnAddressBeforeReturn(regT3);
370 // Load caller frame's scope chain into this callframe so that whatever we call can
371 // get to its global data.
372 emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT0);
373 emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT0);
374 emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);
376 preserveReturnAddressAfterCall(regT3); // Callee preserved
377 emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);
379 // Calling convention: f(a0, a1, a2, a3);
380 // Host function signature: f(ExecState*);
382 // Allocate stack space for 16 bytes (8-byte aligned)
383 // 16 bytes (unused) for 4 arguments
384 subPtr(TrustedImm32(16), stackPointerRegister);
387 move(callFrameRegister, MIPSRegisters::a0);
390 emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, MIPSRegisters::a2);
391 loadPtr(Address(MIPSRegisters::a2, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
392 move(regT0, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
397 // Restore stack space
398 addPtr(TrustedImm32(16), stackPointerRegister);
400 restoreReturnAddressBeforeReturn(regT3);
402 // Load caller frame's scope chain into this callframe so that whatever we call can
403 // get to its global data.
404 emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT2);
405 emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT2);
406 emitPutToCallFrameHeader(regT1, RegisterFile::ScopeChain);
408 preserveReturnAddressAfterCall(regT3); // Callee preserved
409 emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);
411 // Calling convention: f(r0 == regT4, r1 == regT5, ...);
412 // Host function signature: f(ExecState*);
413 move(callFrameRegister, regT4);
415 emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, regT5);
416 move(regT2, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
417 loadPtr(Address(regT5, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
422 restoreReturnAddressBeforeReturn(regT3);
424 #error "JIT not supported on this platform."
428 // Check for an exception
429 Jump sawException = branch32(NotEqual, AbsoluteAddress(reinterpret_cast<char*>(&globalData->exception) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), TrustedImm32(JSValue::EmptyValueTag));
434 // Handle an exception
435 sawException.link(this);
437 // Grab the return address.
438 preserveReturnAddressAfterCall(regT1);
440 move(TrustedImmPtr(&globalData->exceptionLocation), regT2);
441 storePtr(regT1, regT2);
442 poke(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof(void*));
444 // Set the return address.
445 move(TrustedImmPtr(FunctionPtr(ctiVMThrowTrampoline).value()), regT1);
446 restoreReturnAddressBeforeReturn(regT1);
450 // All trampolines constructed! copy the code, link up calls, and set the pointers on the Machine object.
451 LinkBuffer patchBuffer(*m_globalData, this);
453 patchBuffer.link(nativeCall, FunctionPtr(func));
454 return patchBuffer.finalizeCode();
457 void JIT::emit_op_mov(Instruction* currentInstruction)
459 unsigned dst = currentInstruction[1].u.operand;
460 unsigned src = currentInstruction[2].u.operand;
462 if (m_codeBlock->isConstantRegisterIndex(src))
463 emitStore(dst, getConstantOperand(src));
465 emitLoad(src, regT1, regT0);
466 emitStore(dst, regT1, regT0);
467 map(m_bytecodeOffset + OPCODE_LENGTH(op_mov), dst, regT1, regT0);
471 void JIT::emit_op_end(Instruction* currentInstruction)
473 ASSERT(returnValueRegister != callFrameRegister);
474 emitLoad(currentInstruction[1].u.operand, regT1, regT0);
475 restoreReturnAddressBeforeReturn(Address(callFrameRegister, RegisterFile::ReturnPC * static_cast<int>(sizeof(Register))));
479 void JIT::emit_op_jmp(Instruction* currentInstruction)
481 unsigned target = currentInstruction[1].u.operand;
482 addJump(jump(), target);
485 void JIT::emit_op_new_object(Instruction* currentInstruction)
487 emitAllocateJSFinalObject(ImmPtr(m_codeBlock->globalObject()->emptyObjectStructure()), regT0, regT1);
489 emitStoreCell(currentInstruction[1].u.operand, regT0);
492 void JIT::emitSlow_op_new_object(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
495 JITStubCall(this, cti_op_new_object).call(currentInstruction[1].u.operand);
498 void JIT::emit_op_check_has_instance(Instruction* currentInstruction)
500 unsigned baseVal = currentInstruction[1].u.operand;
502 emitLoadPayload(baseVal, regT0);
504 // Check that baseVal is a cell.
505 emitJumpSlowCaseIfNotJSCell(baseVal);
507 // Check that baseVal 'ImplementsHasInstance'.
508 loadPtr(Address(regT0, JSCell::structureOffset()), regT0);
509 addSlowCase(branchTest8(Zero, Address(regT0, Structure::typeInfoFlagsOffset()), TrustedImm32(ImplementsHasInstance)));
512 void JIT::emit_op_instanceof(Instruction* currentInstruction)
514 unsigned dst = currentInstruction[1].u.operand;
515 unsigned value = currentInstruction[2].u.operand;
516 unsigned baseVal = currentInstruction[3].u.operand;
517 unsigned proto = currentInstruction[4].u.operand;
519 // Load the operands into registers.
520 // We use regT0 for baseVal since we will be done with this first, and we can then use it for the result.
521 emitLoadPayload(value, regT2);
522 emitLoadPayload(baseVal, regT0);
523 emitLoadPayload(proto, regT1);
525 // Check that proto are cells. baseVal must be a cell - this is checked by op_check_has_instance.
526 emitJumpSlowCaseIfNotJSCell(value);
527 emitJumpSlowCaseIfNotJSCell(proto);
529 // Check that prototype is an object
530 loadPtr(Address(regT1, JSCell::structureOffset()), regT3);
531 addSlowCase(emitJumpIfNotObject(regT3));
533 // Fixme: this check is only needed because the JSC API allows HasInstance to be overridden; we should deprecate this.
534 // Check that baseVal 'ImplementsDefaultHasInstance'.
535 loadPtr(Address(regT0, JSCell::structureOffset()), regT0);
536 addSlowCase(branchTest8(Zero, Address(regT0, Structure::typeInfoFlagsOffset()), TrustedImm32(ImplementsDefaultHasInstance)));
538 // Optimistically load the result true, and start looping.
539 // Initially, regT1 still contains proto and regT2 still contains value.
540 // As we loop regT2 will be updated with its prototype, recursively walking the prototype chain.
541 move(TrustedImm32(1), regT0);
544 // Load the prototype of the cell in regT2. If this is equal to regT1 - WIN!
545 // Otherwise, check if we've hit null - if we have then drop out of the loop, if not go again.
546 loadPtr(Address(regT2, JSCell::structureOffset()), regT2);
547 load32(Address(regT2, Structure::prototypeOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT2);
548 Jump isInstance = branchPtr(Equal, regT2, regT1);
549 branchTest32(NonZero, regT2).linkTo(loop, this);
551 // We get here either by dropping out of the loop, or if value was not an Object. Result is false.
552 move(TrustedImm32(0), regT0);
554 // isInstance jumps right down to here, to skip setting the result to false (it has already set true).
555 isInstance.link(this);
556 emitStoreBool(dst, regT0);
559 void JIT::emitSlow_op_check_has_instance(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
561 unsigned baseVal = currentInstruction[1].u.operand;
563 linkSlowCaseIfNotJSCell(iter, baseVal);
566 JITStubCall stubCall(this, cti_op_check_has_instance);
567 stubCall.addArgument(baseVal);
571 void JIT::emitSlow_op_instanceof(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
573 unsigned dst = currentInstruction[1].u.operand;
574 unsigned value = currentInstruction[2].u.operand;
575 unsigned baseVal = currentInstruction[3].u.operand;
576 unsigned proto = currentInstruction[4].u.operand;
578 linkSlowCaseIfNotJSCell(iter, value);
579 linkSlowCaseIfNotJSCell(iter, proto);
583 JITStubCall stubCall(this, cti_op_instanceof);
584 stubCall.addArgument(value);
585 stubCall.addArgument(baseVal);
586 stubCall.addArgument(proto);
590 void JIT::emit_op_tear_off_activation(Instruction* currentInstruction)
592 unsigned activation = currentInstruction[1].u.operand;
593 unsigned arguments = currentInstruction[2].u.operand;
594 Jump activationCreated = branch32(NotEqual, tagFor(activation), TrustedImm32(JSValue::EmptyValueTag));
595 Jump argumentsNotCreated = branch32(Equal, tagFor(arguments), TrustedImm32(JSValue::EmptyValueTag));
596 activationCreated.link(this);
597 JITStubCall stubCall(this, cti_op_tear_off_activation);
598 stubCall.addArgument(currentInstruction[1].u.operand);
599 stubCall.addArgument(unmodifiedArgumentsRegister(currentInstruction[2].u.operand));
601 argumentsNotCreated.link(this);
604 void JIT::emit_op_tear_off_arguments(Instruction* currentInstruction)
606 int dst = currentInstruction[1].u.operand;
608 Jump argsNotCreated = branch32(Equal, tagFor(unmodifiedArgumentsRegister(dst)), TrustedImm32(JSValue::EmptyValueTag));
609 JITStubCall stubCall(this, cti_op_tear_off_arguments);
610 stubCall.addArgument(unmodifiedArgumentsRegister(dst));
612 argsNotCreated.link(this);
615 void JIT::emit_op_resolve(Instruction* currentInstruction)
617 JITStubCall stubCall(this, cti_op_resolve);
618 stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
619 stubCall.call(currentInstruction[1].u.operand);
622 void JIT::emit_op_to_primitive(Instruction* currentInstruction)
624 int dst = currentInstruction[1].u.operand;
625 int src = currentInstruction[2].u.operand;
627 emitLoad(src, regT1, regT0);
629 Jump isImm = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));
630 addSlowCase(branchPtr(NotEqual, Address(regT0), TrustedImmPtr(m_globalData->jsStringVPtr)));
634 emitStore(dst, regT1, regT0);
635 map(m_bytecodeOffset + OPCODE_LENGTH(op_to_primitive), dst, regT1, regT0);
638 void JIT::emitSlow_op_to_primitive(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
640 int dst = currentInstruction[1].u.operand;
644 JITStubCall stubCall(this, cti_op_to_primitive);
645 stubCall.addArgument(regT1, regT0);
649 void JIT::emit_op_strcat(Instruction* currentInstruction)
651 JITStubCall stubCall(this, cti_op_strcat);
652 stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
653 stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
654 stubCall.call(currentInstruction[1].u.operand);
657 void JIT::emit_op_resolve_base(Instruction* currentInstruction)
659 JITStubCall stubCall(this, currentInstruction[3].u.operand ? cti_op_resolve_base_strict_put : cti_op_resolve_base);
660 stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
661 stubCall.call(currentInstruction[1].u.operand);
664 void JIT::emit_op_ensure_property_exists(Instruction* currentInstruction)
666 JITStubCall stubCall(this, cti_op_ensure_property_exists);
667 stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
668 stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
669 stubCall.call(currentInstruction[1].u.operand);
672 void JIT::emit_op_resolve_skip(Instruction* currentInstruction)
674 JITStubCall stubCall(this, cti_op_resolve_skip);
675 stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
676 stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
677 stubCall.call(currentInstruction[1].u.operand);
680 void JIT::emit_op_resolve_global(Instruction* currentInstruction, bool dynamic)
682 // FIXME: Optimize to use patching instead of so many memory accesses.
684 unsigned dst = currentInstruction[1].u.operand;
685 void* globalObject = m_codeBlock->globalObject();
687 unsigned currentIndex = m_globalResolveInfoIndex++;
688 GlobalResolveInfo* resolveInfoAddress = &m_codeBlock->globalResolveInfo(currentIndex);
692 move(TrustedImmPtr(globalObject), regT0);
693 move(TrustedImmPtr(resolveInfoAddress), regT3);
694 loadPtr(Address(regT3, OBJECT_OFFSETOF(GlobalResolveInfo, structure)), regT1);
695 addSlowCase(branchPtr(NotEqual, regT1, Address(regT0, JSCell::structureOffset())));
698 loadPtr(Address(regT0, OBJECT_OFFSETOF(JSGlobalObject, m_propertyStorage)), regT2);
699 load32(Address(regT3, OBJECT_OFFSETOF(GlobalResolveInfo, offset)), regT3);
700 load32(BaseIndex(regT2, regT3, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0); // payload
701 load32(BaseIndex(regT2, regT3, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1); // tag
702 emitStore(dst, regT1, regT0);
703 map(m_bytecodeOffset + (dynamic ? OPCODE_LENGTH(op_resolve_global_dynamic) : OPCODE_LENGTH(op_resolve_global)), dst, regT1, regT0);
706 void JIT::emitSlow_op_resolve_global(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
708 unsigned dst = currentInstruction[1].u.operand;
709 Identifier* ident = &m_codeBlock->identifier(currentInstruction[2].u.operand);
711 unsigned currentIndex = m_globalResolveInfoIndex++;
714 JITStubCall stubCall(this, cti_op_resolve_global);
715 stubCall.addArgument(TrustedImmPtr(ident));
716 stubCall.addArgument(Imm32(currentIndex));
720 void JIT::emit_op_not(Instruction* currentInstruction)
722 unsigned dst = currentInstruction[1].u.operand;
723 unsigned src = currentInstruction[2].u.operand;
725 emitLoadTag(src, regT0);
727 emitLoad(src, regT1, regT0);
728 addSlowCase(branch32(NotEqual, regT1, TrustedImm32(JSValue::BooleanTag)));
729 xor32(TrustedImm32(1), regT0);
731 emitStoreBool(dst, regT0, (dst == src));
734 void JIT::emitSlow_op_not(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
736 unsigned dst = currentInstruction[1].u.operand;
737 unsigned src = currentInstruction[2].u.operand;
741 JITStubCall stubCall(this, cti_op_not);
742 stubCall.addArgument(src);
746 void JIT::emit_op_jfalse(Instruction* currentInstruction)
748 unsigned cond = currentInstruction[1].u.operand;
749 unsigned target = currentInstruction[2].u.operand;
751 emitLoad(cond, regT1, regT0);
753 ASSERT((JSValue::BooleanTag + 1 == JSValue::Int32Tag) && !(JSValue::Int32Tag + 1));
754 addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::BooleanTag)));
755 addJump(branchTest32(Zero, regT0), target);
758 void JIT::emitSlow_op_jfalse(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
760 unsigned cond = currentInstruction[1].u.operand;
761 unsigned target = currentInstruction[2].u.operand;
765 if (supportsFloatingPoint()) {
766 // regT1 contains the tag from the hot path.
767 Jump notNumber = branch32(Above, regT1, Imm32(JSValue::LowestTag));
769 emitLoadDouble(cond, fpRegT0);
770 emitJumpSlowToHot(branchDoubleZeroOrNaN(fpRegT0, fpRegT1), target);
771 emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_jfalse));
773 notNumber.link(this);
776 JITStubCall stubCall(this, cti_op_jtrue);
777 stubCall.addArgument(cond);
779 emitJumpSlowToHot(branchTest32(Zero, regT0), target); // Inverted.
782 void JIT::emit_op_jtrue(Instruction* currentInstruction)
784 unsigned cond = currentInstruction[1].u.operand;
785 unsigned target = currentInstruction[2].u.operand;
787 emitLoad(cond, regT1, regT0);
789 ASSERT((JSValue::BooleanTag + 1 == JSValue::Int32Tag) && !(JSValue::Int32Tag + 1));
790 addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::BooleanTag)));
791 addJump(branchTest32(NonZero, regT0), target);
794 void JIT::emitSlow_op_jtrue(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
796 unsigned cond = currentInstruction[1].u.operand;
797 unsigned target = currentInstruction[2].u.operand;
801 if (supportsFloatingPoint()) {
802 // regT1 contains the tag from the hot path.
803 Jump notNumber = branch32(Above, regT1, Imm32(JSValue::LowestTag));
805 emitLoadDouble(cond, fpRegT0);
806 emitJumpSlowToHot(branchDoubleNonZero(fpRegT0, fpRegT1), target);
807 emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_jtrue));
809 notNumber.link(this);
812 JITStubCall stubCall(this, cti_op_jtrue);
813 stubCall.addArgument(cond);
815 emitJumpSlowToHot(branchTest32(NonZero, regT0), target);
// op_jeq_null: jump to `target` when `src` compares equal to null
// (null, undefined, or an object that masquerades as undefined).
820 void JIT::emit_op_jeq_null(Instruction* currentInstruction)
822 unsigned src = currentInstruction[1].u.operand;
823 unsigned target = currentInstruction[2].u.operand;
825 emitLoad(src, regT1, regT0);
827 Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));
829 // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
830 loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
831 addJump(branchTest8(NonZero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined)), target);
833 Jump wasNotImmediate = jump();
835 // Now handle the immediate cases - undefined & null
836 isImmediate.link(this);
// Per the ASSERT, UndefinedTag and NullTag differ only in the low bit, so
// or-ing in 1 folds both tags onto NullTag for a single comparison.
838 ASSERT((JSValue::UndefinedTag + 1 == JSValue::NullTag) && (JSValue::NullTag & 0x1));
839 or32(TrustedImm32(1), regT1);
840 addJump(branch32(Equal, regT1, TrustedImm32(JSValue::NullTag)), target);
842 wasNotImmediate.link(this);
// op_jneq_null: mirror of op_jeq_null — jump to `target` when `src` is NOT
// null-like (branch senses are inverted: Zero test on the masquerade bit,
// NotEqual on the folded null/undefined tag).
845 void JIT::emit_op_jneq_null(Instruction* currentInstruction)
847 unsigned src = currentInstruction[1].u.operand;
848 unsigned target = currentInstruction[2].u.operand;
850 emitLoad(src, regT1, regT0);
852 Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));
854 // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
855 loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
856 addJump(branchTest8(Zero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined)), target);
858 Jump wasNotImmediate = jump();
860 // Now handle the immediate cases - undefined & null
861 isImmediate.link(this);
// Same low-bit folding trick as op_jeq_null (see ASSERT).
863 ASSERT((JSValue::UndefinedTag + 1 == JSValue::NullTag) && (JSValue::NullTag & 0x1));
864 or32(TrustedImm32(1), regT1);
865 addJump(branch32(NotEqual, regT1, TrustedImm32(JSValue::NullTag)), target);
867 wasNotImmediate.link(this);
// op_jneq_ptr: jump to `target` unless `src` is exactly the given cell
// (compared by pointer identity). Non-cells always take the jump.
870 void JIT::emit_op_jneq_ptr(Instruction* currentInstruction)
872 unsigned src = currentInstruction[1].u.operand;
873 JSCell* ptr = currentInstruction[2].u.jsCell.get();
874 unsigned target = currentInstruction[3].u.operand;
876 emitLoad(src, regT1, regT0);
877 addJump(branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag)), target);
878 addJump(branchPtr(NotEqual, regT0, TrustedImmPtr(ptr)), target);
// op_jsr: subroutine jump. Stores a patchable return address into the
// `retAddrDst` register slot, jumps to `target`, and records the patch
// location plus the label after the jump so the address can be filled in
// at link time (see m_jsrSites).
881 void JIT::emit_op_jsr(Instruction* currentInstruction)
883 int retAddrDst = currentInstruction[1].u.operand;
884 int target = currentInstruction[2].u.operand;
885 DataLabelPtr storeLocation = storePtrWithPatch(TrustedImmPtr(0), Address(callFrameRegister, sizeof(Register) * retAddrDst));
886 addJump(jump(), target);
887 m_jsrSites.append(JSRInfo(storeLocation, label()));
// op_sret: return from an op_jsr subroutine by jumping to the address
// previously stored in the given register slot.
890 void JIT::emit_op_sret(Instruction* currentInstruction)
892 jump(Address(callFrameRegister, sizeof(Register) * currentInstruction[1].u.operand));
// op_eq fast path: only handles values with identical, non-cell, non-double
// tags, where loose equality reduces to a payload compare. Mixed tags,
// cells (strings/objects) and doubles all bail to the slow case.
895 void JIT::emit_op_eq(Instruction* currentInstruction)
897 unsigned dst = currentInstruction[1].u.operand;
898 unsigned src1 = currentInstruction[2].u.operand;
899 unsigned src2 = currentInstruction[3].u.operand;
901 emitLoad2(src1, regT1, regT0, src2, regT3, regT2);
902 addSlowCase(branch32(NotEqual, regT1, regT3)); // tags differ
903 addSlowCase(branch32(Equal, regT1, TrustedImm32(JSValue::CellTag))); // both cells
904 addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::LowestTag))); // doubles
906 compare32(Equal, regT0, regT2, regT0);
908 emitStoreBool(dst, regT0);
// op_eq slow path. Two string cells get the fast cti_op_eq_strings stub;
// every other bailout (mixed tags, non-string cells, doubles) funnels into
// the generic cti_op_eq stub, which re-loads the operands from their
// virtual registers.
911 void JIT::emitSlow_op_eq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
913 unsigned dst = currentInstruction[1].u.operand;
914 unsigned op1 = currentInstruction[2].u.operand;
915 unsigned op2 = currentInstruction[3].u.operand;
917 JumpList storeResult;
918 JumpList genericCase;
920 genericCase.append(getSlowCase(iter)); // tags not equal
922 linkSlowCase(iter); // tags equal and JSCell
// Both payloads must point at JSStrings (vptr check) to use the string stub.
923 genericCase.append(branchPtr(NotEqual, Address(regT0), TrustedImmPtr(m_globalData->jsStringVPtr)));
924 genericCase.append(branchPtr(NotEqual, Address(regT2), TrustedImmPtr(m_globalData->jsStringVPtr)));
927 JITStubCall stubCallEqStrings(this, cti_op_eq_strings);
928 stubCallEqStrings.addArgument(regT0);
929 stubCallEqStrings.addArgument(regT2);
930 stubCallEqStrings.call();
931 storeResult.append(jump());
934 genericCase.append(getSlowCase(iter)); // doubles
935 genericCase.link(this);
936 JITStubCall stubCallEq(this, cti_op_eq);
937 stubCallEq.addArgument(op1);
938 stubCallEq.addArgument(op2);
939 stubCallEq.call(regT0);
941 storeResult.link(this);
942 emitStoreBool(dst, regT0);
// op_neq fast path: identical structure to emit_op_eq, but the final
// payload comparison uses NotEqual.
945 void JIT::emit_op_neq(Instruction* currentInstruction)
947 unsigned dst = currentInstruction[1].u.operand;
948 unsigned src1 = currentInstruction[2].u.operand;
949 unsigned src2 = currentInstruction[3].u.operand;
951 emitLoad2(src1, regT1, regT0, src2, regT3, regT2);
952 addSlowCase(branch32(NotEqual, regT1, regT3)); // tags differ
953 addSlowCase(branch32(Equal, regT1, TrustedImm32(JSValue::CellTag))); // both cells
954 addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::LowestTag))); // doubles
956 compare32(NotEqual, regT0, regT2, regT0);
958 emitStoreBool(dst, regT0);
// op_neq slow path: same shape as emitSlow_op_eq, except the generic stub
// takes the operands from registers (regT1:regT0 / regT3:regT2) and the
// equality result is inverted with xor before being stored.
961 void JIT::emitSlow_op_neq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
963 unsigned dst = currentInstruction[1].u.operand;
965 JumpList storeResult;
966 JumpList genericCase;
968 genericCase.append(getSlowCase(iter)); // tags not equal
970 linkSlowCase(iter); // tags equal and JSCell
971 genericCase.append(branchPtr(NotEqual, Address(regT0), TrustedImmPtr(m_globalData->jsStringVPtr)));
972 genericCase.append(branchPtr(NotEqual, Address(regT2), TrustedImmPtr(m_globalData->jsStringVPtr)));
975 JITStubCall stubCallEqStrings(this, cti_op_eq_strings);
976 stubCallEqStrings.addArgument(regT0);
977 stubCallEqStrings.addArgument(regT2);
978 stubCallEqStrings.call(regT0);
979 storeResult.append(jump());
982 genericCase.append(getSlowCase(iter)); // doubles
983 genericCase.link(this);
984 JITStubCall stubCallEq(this, cti_op_eq);
985 stubCallEq.addArgument(regT1, regT0);
986 stubCallEq.addArgument(regT3, regT2);
987 stubCallEq.call(regT0);
989 storeResult.link(this);
// Invert cti_op_eq's "equal" boolean to get "not equal".
990 xor32(TrustedImm32(0x1), regT0);
991 emitStoreBool(dst, regT0);
// Shared fast path for op_stricteq / op_nstricteq. Strict equality on
// same-tag, non-double, non-two-string values is a pure payload compare;
// `type` selects whether the stored boolean is Equal or NotEqual.
994 void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
996 unsigned dst = currentInstruction[1].u.operand;
997 unsigned src1 = currentInstruction[2].u.operand;
998 unsigned src2 = currentInstruction[3].u.operand;
1000 emitLoad2(src1, regT1, regT0, src2, regT3, regT2);
1002 // Bail if the tags differ, or are double.
1003 addSlowCase(branch32(NotEqual, regT1, regT3));
1004 addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::LowestTag)));
1006 // Jump to a slow case if both are strings.
1007 Jump notCell = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));
1008 Jump firstNotString = branchPtr(NotEqual, Address(regT0), TrustedImmPtr(m_globalData->jsStringVPtr));
1009 addSlowCase(branchPtr(Equal, Address(regT2), TrustedImmPtr(m_globalData->jsStringVPtr)));
1011 firstNotString.link(this);
1013 // Simply compare the payloads.
1014 if (type == OpStrictEq)
1015 compare32(Equal, regT0, regT2, regT0);
1017 compare32(NotEqual, regT0, regT2, regT0);
1019 emitStoreBool(dst, regT0);
// op_stricteq: delegate to the shared strict-equality compiler.
1022 void JIT::emit_op_stricteq(Instruction* currentInstruction)
1024 compileOpStrictEq(currentInstruction, OpStrictEq);
// op_stricteq slow path: hand both operands to the cti_op_stricteq stub.
1027 void JIT::emitSlow_op_stricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1029 unsigned dst = currentInstruction[1].u.operand;
1030 unsigned src1 = currentInstruction[2].u.operand;
1031 unsigned src2 = currentInstruction[3].u.operand;
1037 JITStubCall stubCall(this, cti_op_stricteq);
1038 stubCall.addArgument(src1);
1039 stubCall.addArgument(src2);
// op_nstricteq: delegate to the shared strict-equality compiler, inverted.
1043 void JIT::emit_op_nstricteq(Instruction* currentInstruction)
1045 compileOpStrictEq(currentInstruction, OpNStrictEq);
// op_nstricteq slow path: hand both operands to the cti_op_nstricteq stub.
1048 void JIT::emitSlow_op_nstricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1050 unsigned dst = currentInstruction[1].u.operand;
1051 unsigned src1 = currentInstruction[2].u.operand;
1052 unsigned src2 = currentInstruction[3].u.operand;
1058 JITStubCall stubCall(this, cti_op_nstricteq);
1059 stubCall.addArgument(src1);
1060 stubCall.addArgument(src2);
// op_eq_null: dst = (src == null), i.e. true for null, undefined, or a
// cell that masquerades as undefined. Result boolean ends up in regT1.
1064 void JIT::emit_op_eq_null(Instruction* currentInstruction)
1066 unsigned dst = currentInstruction[1].u.operand;
1067 unsigned src = currentInstruction[2].u.operand;
1069 emitLoad(src, regT1, regT0);
1070 Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));
// Cell case: result is the MasqueradesAsUndefined bit of the structure.
1072 loadPtr(Address(regT0, JSCell::structureOffset()), regT1);
1073 test8(NonZero, Address(regT1, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined), regT1);
1075 Jump wasNotImmediate = jump();
1077 isImmediate.link(this);
// Immediate case: compare the tag against NullTag and UndefinedTag.
1079 compare32(Equal, regT1, TrustedImm32(JSValue::NullTag), regT2);
1080 compare32(Equal, regT1, TrustedImm32(JSValue::UndefinedTag), regT1);
// NOTE(review): the null-compare result in regT2 is never merged into regT1
// here — the matching emit_op_neq_null does and32(regT2, regT1), and upstream
// WebKit has or32(regT2, regT1) at this point. Confirm against the full file;
// as shown, a null value would not be reported equal to null.
1083 wasNotImmediate.link(this);
1085 emitStoreBool(dst, regT1);
// op_neq_null: dst = (src != null); inverse of op_eq_null. Both tag
// comparisons must be NotEqual, so the two results are and-ed together.
1088 void JIT::emit_op_neq_null(Instruction* currentInstruction)
1090 unsigned dst = currentInstruction[1].u.operand;
1091 unsigned src = currentInstruction[2].u.operand;
1093 emitLoad(src, regT1, regT0);
1094 Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));
// Cell case: not-null unless the structure masquerades as undefined.
1096 loadPtr(Address(regT0, JSCell::structureOffset()), regT1);
1097 test8(Zero, Address(regT1, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined), regT1);
1099 Jump wasNotImmediate = jump();
1101 isImmediate.link(this);
1103 compare32(NotEqual, regT1, TrustedImm32(JSValue::NullTag), regT2);
1104 compare32(NotEqual, regT1, TrustedImm32(JSValue::UndefinedTag), regT1);
1105 and32(regT2, regT1); // true only if neither null nor undefined
1107 wasNotImmediate.link(this);
1109 emitStoreBool(dst, regT1);
// op_resolve_with_base: fully stub-based; cti_op_resolve_with_base resolves
// the identifier (operand 3) and writes base/value into operands 1 and 2.
1112 void JIT::emit_op_resolve_with_base(Instruction* currentInstruction)
1114 JITStubCall stubCall(this, cti_op_resolve_with_base);
1115 stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[3].u.operand)));
1116 stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
1117 stubCall.call(currentInstruction[2].u.operand);
// op_resolve_with_this: same stub-call pattern as op_resolve_with_base,
// using cti_op_resolve_with_this.
1120 void JIT::emit_op_resolve_with_this(Instruction* currentInstruction)
1122 JITStubCall stubCall(this, cti_op_resolve_with_this);
1123 stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[3].u.operand)));
1124 stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
1125 stubCall.call(currentInstruction[2].u.operand);
// op_throw: pass the exception value to the cti_op_throw stub, which
// transfers control to the handler and never returns here.
1128 void JIT::emit_op_throw(Instruction* currentInstruction)
1130 unsigned exception = currentInstruction[1].u.operand;
1131 JITStubCall stubCall(this, cti_op_throw);
1132 stubCall.addArgument(exception);
1136 // cti_op_throw always changes it's return address,
1137 // this point in the code should never be reached.
// op_get_pnames: set up a for-in property-name iterator for `base`.
// Stores the iterator in `dst`, zeroes the index register `i`, and stores
// the name count in `size`. Null/undefined bases jump straight to
// `breakTarget` (the loop is skipped); other non-objects are converted via
// cti_to_object and retried.
1142 void JIT::emit_op_get_pnames(Instruction* currentInstruction)
1144 int dst = currentInstruction[1].u.operand;
1145 int base = currentInstruction[2].u.operand;
1146 int i = currentInstruction[3].u.operand;
1147 int size = currentInstruction[4].u.operand;
1148 int breakTarget = currentInstruction[5].u.operand;
1150 JumpList isNotObject;
1152 emitLoad(base, regT1, regT0);
1153 if (!m_codeBlock->isKnownNotImmediate(base))
1154 isNotObject.append(branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag)));
// `this` in sloppy mode is already an object, so the type check is skipped.
1155 if (base != m_codeBlock->thisRegister() || m_codeBlock->isStrictMode()) {
1156 loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
1157 isNotObject.append(emitJumpIfNotObject(regT2));
1160 // We could inline the case where you have a valid cache, but
1161 // this call doesn't seem to be hot.
1162 Label isObject(this);
1163 JITStubCall getPnamesStubCall(this, cti_op_get_pnames);
1164 getPnamesStubCall.addArgument(regT0);
1165 getPnamesStubCall.call(dst);
1166 load32(Address(regT0, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStringsSize)), regT3);
// i = 0, size = iterator's string count (both stored as int32 values).
1167 store32(TrustedImm32(Int32Tag), intTagFor(i));
1168 store32(TrustedImm32(0), intPayloadFor(i));
1169 store32(TrustedImm32(Int32Tag), intTagFor(size));
1170 store32(regT3, payloadFor(size));
1173 isNotObject.link(this);
1174 addJump(branch32(Equal, regT1, TrustedImm32(JSValue::NullTag)), breakTarget);
1175 addJump(branch32(Equal, regT1, TrustedImm32(JSValue::UndefinedTag)), breakTarget);
1176 JITStubCall toObjectStubCall(this, cti_to_object);
1177 toObjectStubCall.addArgument(regT1, regT0);
1178 toObjectStubCall.call(base);
1179 jump().linkTo(isObject, this);
// op_next_pname: fetch the next enumerable property name for a for-in loop.
// Loads name i from the iterator into `dst`, bumps i, then validates that
// the cached structure (and each structure in the cached prototype chain)
// still matches the base object. On any mismatch it falls back to the
// cti_has_property stub; a valid key jumps to `target`, otherwise loop.
1184 void JIT::emit_op_next_pname(Instruction* currentInstruction)
1186 int dst = currentInstruction[1].u.operand;
1187 int base = currentInstruction[2].u.operand;
1188 int i = currentInstruction[3].u.operand;
1189 int size = currentInstruction[4].u.operand;
1190 int it = currentInstruction[5].u.operand;
1191 int target = currentInstruction[6].u.operand;
1193 JumpList callHasProperty;
// Done when i == size.
1196 load32(intPayloadFor(i), regT0);
1197 Jump end = branch32(Equal, regT0, intPayloadFor(size));
// Load the i'th JSString* from the iterator and store it as a cell in dst.
1200 loadPtr(payloadFor(it), regT1);
1201 loadPtr(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStrings)), regT2);
1202 load32(BaseIndex(regT2, regT0, TimesEight), regT2);
1203 store32(TrustedImm32(JSValue::CellTag), tagFor(dst));
1204 store32(regT2, payloadFor(dst));
// i++
1207 add32(TrustedImm32(1), regT0);
1208 store32(regT0, intPayloadFor(i));
1210 // Verify that i is valid:
1211 loadPtr(payloadFor(base), regT0);
1213 // Test base's structure
1214 loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
1215 callHasProperty.append(branchPtr(NotEqual, regT2, Address(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedStructure)))));
1217 // Test base's prototype chain
1218 loadPtr(Address(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedPrototypeChain))), regT3);
1219 loadPtr(Address(regT3, OBJECT_OFFSETOF(StructureChain, m_vector)), regT3);
// Empty chain vector => nothing more to validate; key is good.
1220 addJump(branchTestPtr(Zero, Address(regT3)), target);
// Walk the prototype chain, comparing each prototype's current structure
// against the cached StructureChain entry.
1222 Label checkPrototype(this);
1223 callHasProperty.append(branch32(Equal, Address(regT2, Structure::prototypeOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), TrustedImm32(JSValue::NullTag)));
1224 loadPtr(Address(regT2, Structure::prototypeOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT2);
1225 loadPtr(Address(regT2, JSCell::structureOffset()), regT2);
1226 callHasProperty.append(branchPtr(NotEqual, regT2, Address(regT3)));
1227 addPtr(TrustedImm32(sizeof(Structure*)), regT3);
1228 branchTestPtr(NonZero, Address(regT3)).linkTo(checkPrototype, this);
1231 addJump(jump(), target);
1233 // Slow case: Ask the object if i is valid.
1234 callHasProperty.link(this);
1235 loadPtr(addressFor(dst), regT1);
1236 JITStubCall stubCall(this, cti_has_property);
1237 stubCall.addArgument(regT0);
1238 stubCall.addArgument(regT1);
1241 // Test for valid key.
1242 addJump(branchTest32(NonZero, regT0), target);
1243 jump().linkTo(begin, this);
// op_push_scope: push operand 1 onto the scope chain via the stub, writing
// the (possibly converted) scope object back to the same register.
1249 void JIT::emit_op_push_scope(Instruction* currentInstruction)
1251 JITStubCall stubCall(this, cti_op_push_scope);
1252 stubCall.addArgument(currentInstruction[1].u.operand);
1253 stubCall.call(currentInstruction[1].u.operand);
// op_pop_scope: pop the top of the scope chain; no operands, pure stub call.
1256 void JIT::emit_op_pop_scope(Instruction*)
1258 JITStubCall(this, cti_op_pop_scope).call();
// op_to_jsnumber: coerce `src` to a number. Int32 passes through directly;
// tags at or above EmptyValueTag (non-number immediates/cells) bail to the
// slow case, leaving doubles to fall through unchanged.
1261 void JIT::emit_op_to_jsnumber(Instruction* currentInstruction)
1263 int dst = currentInstruction[1].u.operand;
1264 int src = currentInstruction[2].u.operand;
1266 emitLoad(src, regT1, regT0);
1268 Jump isInt32 = branch32(Equal, regT1, TrustedImm32(JSValue::Int32Tag));
1269 addSlowCase(branch32(AboveOrEqual, regT1, TrustedImm32(JSValue::EmptyValueTag)));
1273 emitStore(dst, regT1, regT0);
// Record the register mapping so following opcodes can reuse regT1/regT0.
1274 map(m_bytecodeOffset + OPCODE_LENGTH(op_to_jsnumber), dst, regT1, regT0);
// op_to_jsnumber slow path: full conversion through the cti_op_to_jsnumber
// stub, passing the tag/payload pair from the hot path.
1277 void JIT::emitSlow_op_to_jsnumber(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1279 int dst = currentInstruction[1].u.operand;
1283 JITStubCall stubCall(this, cti_op_to_jsnumber);
1284 stubCall.addArgument(regT1, regT0);
// op_push_new_scope: push a new scope binding identifier (operand 2) to the
// value in operand 3; the new scope object lands in operand 1.
1288 void JIT::emit_op_push_new_scope(Instruction* currentInstruction)
1290 JITStubCall stubCall(this, cti_op_push_new_scope);
1291 stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
1292 stubCall.addArgument(currentInstruction[3].u.operand);
1293 stubCall.call(currentInstruction[1].u.operand);
// op_catch: entry point of an exception handler. Restores the call frame
// returned by cti_op_throw, moves globalData->exception into the handler's
// exception register, and clears the global exception slot.
1296 void JIT::emit_op_catch(Instruction* currentInstruction)
1298 // cti_op_throw returns the callFrame for the handler.
1299 move(regT0, callFrameRegister);
1301 // Now store the exception returned by cti_op_throw.
1302 loadPtr(Address(stackPointerRegister, OBJECT_OFFSETOF(struct JITStackFrame, globalData)), regT3);
1303 load32(Address(regT3, OBJECT_OFFSETOF(JSGlobalData, exception) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);
1304 load32(Address(regT3, OBJECT_OFFSETOF(JSGlobalData, exception) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
// Reset globalData->exception to the empty JSValue so it is not re-thrown.
1305 store32(TrustedImm32(JSValue().payload()), Address(regT3, OBJECT_OFFSETOF(JSGlobalData, exception) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)));
1306 store32(TrustedImm32(JSValue().tag()), Address(regT3, OBJECT_OFFSETOF(JSGlobalData, exception) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)));
1308 unsigned exception = currentInstruction[1].u.operand;
1309 emitStore(exception, regT1, regT0);
1310 map(m_bytecodeOffset + OPCODE_LENGTH(op_catch), exception, regT1, regT0);
// op_jmp_scopes: pop scope-chain entries via the stub (operand 1 = count),
// then jump unconditionally to the bytecode target in operand 2.
1313 void JIT::emit_op_jmp_scopes(Instruction* currentInstruction)
1315 JITStubCall stubCall(this, cti_op_jmp_scopes);
1316 stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
1318 addJump(jump(), currentInstruction[2].u.operand);
// op_switch_imm: integer switch. Registers the jump table with m_switches
// (CTI offsets are filled in at link time) and asks the cti_op_switch_imm
// stub for the destination of this scrutinee.
1321 void JIT::emit_op_switch_imm(Instruction* currentInstruction)
1323 unsigned tableIndex = currentInstruction[1].u.operand;
1324 unsigned defaultOffset = currentInstruction[2].u.operand;
1325 unsigned scrutinee = currentInstruction[3].u.operand;
1327 // create jump table for switch destinations, track this switch statement.
1328 SimpleJumpTable* jumpTable = &m_codeBlock->immediateSwitchJumpTable(tableIndex);
1329 m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Immediate));
1330 jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());
1332 JITStubCall stubCall(this, cti_op_switch_imm);
1333 stubCall.addArgument(scrutinee);
1334 stubCall.addArgument(Imm32(tableIndex));
// op_switch_char: single-character string switch; identical structure to
// op_switch_imm but uses the character jump table and cti_op_switch_char.
1339 void JIT::emit_op_switch_char(Instruction* currentInstruction)
1341 unsigned tableIndex = currentInstruction[1].u.operand;
1342 unsigned defaultOffset = currentInstruction[2].u.operand;
1343 unsigned scrutinee = currentInstruction[3].u.operand;
1345 // create jump table for switch destinations, track this switch statement.
1346 SimpleJumpTable* jumpTable = &m_codeBlock->characterSwitchJumpTable(tableIndex);
1347 m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Character));
1348 jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());
1350 JITStubCall stubCall(this, cti_op_switch_char);
1351 stubCall.addArgument(scrutinee);
1352 stubCall.addArgument(Imm32(tableIndex));
// op_switch_string: string switch; uses the StringJumpTable (no ctiOffsets
// grow here — handled by the StringJumpTable record) and cti_op_switch_string.
1357 void JIT::emit_op_switch_string(Instruction* currentInstruction)
1359 unsigned tableIndex = currentInstruction[1].u.operand;
1360 unsigned defaultOffset = currentInstruction[2].u.operand;
1361 unsigned scrutinee = currentInstruction[3].u.operand;
1363 // create jump table for switch destinations, track this switch statement.
1364 StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
1365 m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset));
1367 JITStubCall stubCall(this, cti_op_switch_string);
1368 stubCall.addArgument(scrutinee);
1369 stubCall.addArgument(Imm32(tableIndex));
// op_throw_reference_error: throw a ReferenceError whose message is the
// constant-pool string in operand 1.
1374 void JIT::emit_op_throw_reference_error(Instruction* currentInstruction)
1376 unsigned message = currentInstruction[1].u.operand;
1378 JITStubCall stubCall(this, cti_op_throw_reference_error);
1379 stubCall.addArgument(m_codeBlock->getConstant(message));
// op_debug: notify the debugger. With DEBUG_WITH_BREAKPOINT the instruction
// is compiled differently (operand unused here); otherwise forward the three
// operands (hook id, first line, last line per bytecode layout) to the stub.
1383 void JIT::emit_op_debug(Instruction* currentInstruction)
1385 #if ENABLE(DEBUG_WITH_BREAKPOINT)
1386 UNUSED_PARAM(currentInstruction);
1389 JITStubCall stubCall(this, cti_op_debug);
1390 stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
1391 stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
1392 stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
// op_enter: function prologue — fill every local variable slot with
// undefined so the GC never sees stale values.
1398 void JIT::emit_op_enter(Instruction*)
1400 // Even though JIT code doesn't use them, we initialize our constant
1401 // registers to zap stale pointers, to avoid unnecessarily prolonging
1402 // object lifetime and increasing GC pressure.
1403 for (int i = 0; i < m_codeBlock->m_numVars; ++i)
1404 emitStore(i, jsUndefined());
// op_create_activation: lazily create the activation object. Skips the stub
// call if the slot is already populated (tag != EmptyValueTag).
1407 void JIT::emit_op_create_activation(Instruction* currentInstruction)
1409 unsigned activation = currentInstruction[1].u.operand;
1411 Jump activationCreated = branch32(NotEqual, tagFor(activation), TrustedImm32(JSValue::EmptyValueTag));
1412 JITStubCall(this, cti_op_push_activation).call(activation);
1413 activationCreated.link(this);
// op_create_arguments: lazily create the arguments object. If already
// present, do nothing; otherwise call the no-params fast stub when the
// function takes no declared parameters, and store the result into both the
// visible and the unmodified arguments registers.
1416 void JIT::emit_op_create_arguments(Instruction* currentInstruction)
1418 unsigned dst = currentInstruction[1].u.operand;
1420 Jump argsCreated = branch32(NotEqual, tagFor(dst), TrustedImm32(JSValue::EmptyValueTag));
// m_numParameters includes `this`, so == 1 means "no declared params".
1422 if (m_codeBlock->m_numParameters == 1)
1423 JITStubCall(this, cti_op_create_arguments_no_params).call();
1425 JITStubCall(this, cti_op_create_arguments).call();
1427 emitStore(dst, regT1, regT0);
1428 emitStore(unmodifiedArgumentsRegister(dst), regT1, regT0);
1430 argsCreated.link(this);
// op_init_lazy_reg: mark a lazily-initialized register as empty (JSValue()),
// the sentinel the lazy-creation opcodes above test for.
1433 void JIT::emit_op_init_lazy_reg(Instruction* currentInstruction)
1435 unsigned dst = currentInstruction[1].u.operand;
1437 emitStore(dst, JSValue());
// op_get_callee: read the Callee slot from the call-frame header and store
// it into `dst` as a cell.
1440 void JIT::emit_op_get_callee(Instruction* currentInstruction)
1442 int dst = currentInstruction[1].u.operand;
1443 emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, regT0);
1444 emitStoreCell(dst, regT0);
// op_create_this fast path: inline-allocate the `this` object for a
// constructor call. Requires the prototype (operand 2) to be an object cell
// with a cached inheritorID structure; any failed check (not a cell, not an
// object, no inheritorID, allocation failure inside
// emitAllocateJSFinalObject) bails to the slow path.
1447 void JIT::emit_op_create_this(Instruction* currentInstruction)
1449 emitLoad(currentInstruction[2].u.operand, regT1, regT0);
1450 emitJumpSlowCaseIfNotJSCell(currentInstruction[2].u.operand, regT1);
1451 loadPtr(Address(regT0, JSCell::structureOffset()), regT1);
1452 addSlowCase(emitJumpIfNotObject(regT1));
1454 // now we know that the prototype is an object, but we don't know if it's got an
1457 loadPtr(Address(regT0, JSObject::offsetOfInheritorID()), regT2);
1458 addSlowCase(branchTestPtr(Zero, regT2));
1460 // now regT2 contains the inheritorID, which is the structure that the newly
1461 // allocated object will have.
1463 emitAllocateJSFinalObject(regT2, regT0, regT1);
1465 emitStoreCell(currentInstruction[1].u.operand, regT0);
// op_create_this slow path: links the four fast-path bailouts (order must
// match addSlowCase order in emit_op_create_this) and delegates to the
// cti_op_create_this stub with the prototype value.
1468 void JIT::emitSlow_op_create_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1470 linkSlowCaseIfNotJSCell(iter, currentInstruction[2].u.operand); // not a cell
1471 linkSlowCase(iter); // not an object
1472 linkSlowCase(iter); // doesn't have an inheritor ID
1473 linkSlowCase(iter); // allocation failed
1474 unsigned protoRegister = currentInstruction[2].u.operand;
1475 emitLoad(protoRegister, regT1, regT0);
1476 JITStubCall stubCall(this, cti_op_create_this);
1477 stubCall.addArgument(regT1, regT0);
1478 stubCall.call(currentInstruction[1].u.operand);
// op_convert_this fast path: `this` is already usable if it is a cell that
// is not a string; anything else (non-cell, or a string cell) bails.
1481 void JIT::emit_op_convert_this(Instruction* currentInstruction)
1483 unsigned thisRegister = currentInstruction[1].u.operand;
1485 emitLoad(thisRegister, regT1, regT0);
1487 addSlowCase(branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag)));
1488 addSlowCase(branchPtr(Equal, Address(regT0), TrustedImmPtr(m_globalData->jsStringVPtr)));
1490 map(m_bytecodeOffset + OPCODE_LENGTH(op_convert_this), thisRegister, regT1, regT0);
// op_convert_this slow path: undefined `this` is rewritten directly to the
// global object's `globalThis` cell (address baked in at compile time);
// all other values go through the cti_op_convert_this stub.
1493 void JIT::emitSlow_op_convert_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1495 void* globalThis = m_codeBlock->globalObject()->globalScopeChain()->globalThis.get();
1496 unsigned thisRegister = currentInstruction[1].u.operand;
1499 Jump isNotUndefined = branch32(NotEqual, regT1, TrustedImm32(JSValue::UndefinedTag));
1500 move(TrustedImmPtr(globalThis), regT0);
1501 move(TrustedImm32(JSValue::CellTag), regT1);
1502 emitStore(thisRegister, regT1, regT0);
1503 emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_convert_this));
1505 isNotUndefined.link(this);
1507 JITStubCall stubCall(this, cti_op_convert_this);
1508 stubCall.addArgument(regT1, regT0);
1509 stubCall.call(thisRegister);
// op_profile_will_call: if a profiler is enabled (pointer in the JIT stack
// frame is non-null), notify it via the stub; otherwise skip.
1512 void JIT::emit_op_profile_will_call(Instruction* currentInstruction)
1514 peek(regT2, OBJECT_OFFSETOF(JITStackFrame, enabledProfilerReference) / sizeof(void*));
1515 Jump noProfiler = branchTestPtr(Zero, Address(regT2));
1517 JITStubCall stubCall(this, cti_op_profile_will_call);
1518 stubCall.addArgument(currentInstruction[1].u.operand);
1520 noProfiler.link(this);
// op_profile_did_call: mirror of op_profile_will_call using the
// cti_op_profile_did_call stub.
1523 void JIT::emit_op_profile_did_call(Instruction* currentInstruction)
1525 peek(regT2, OBJECT_OFFSETOF(JITStackFrame, enabledProfilerReference) / sizeof(void*));
1526 Jump noProfiler = branchTestPtr(Zero, Address(regT2));
1528 JITStubCall stubCall(this, cti_op_profile_did_call);
1529 stubCall.addArgument(currentInstruction[1].u.operand);
1531 noProfiler.link(this);
// op_get_arguments_length fast path: valid only while the arguments object
// has NOT been materialized (slot still empty). Length is the call frame's
// ArgumentCount minus one (excluding `this`).
1534 void JIT::emit_op_get_arguments_length(Instruction* currentInstruction)
1536 int dst = currentInstruction[1].u.operand;
1537 int argumentsRegister = currentInstruction[2].u.operand;
1538 addSlowCase(branch32(NotEqual, tagFor(argumentsRegister), TrustedImm32(JSValue::EmptyValueTag)));
1539 emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT0);
1540 sub32(TrustedImm32(1), regT0);
1541 emitStoreInt32(dst, regT0);
1542 void JIT::emitSlow_op_get_arguments_length(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1545 int dst = currentInstruction[1].u.operand;
1546 int base = currentInstruction[2].u.operand;
1547 int ident = currentInstruction[3].u.operand;
1549 JITStubCall stubCall(this, cti_op_get_by_id_generic);
1550 stubCall.addArgument(base);
1551 stubCall.addArgument(TrustedImmPtr(&(m_codeBlock->identifier(ident))));
1555 void JIT::emit_op_get_argument_by_val(Instruction* currentInstruction)
1557 int dst = currentInstruction[1].u.operand;
1558 int argumentsRegister = currentInstruction[2].u.operand;
1559 int property = currentInstruction[3].u.operand;
1560 addSlowCase(branch32(NotEqual, tagFor(argumentsRegister), TrustedImm32(JSValue::EmptyValueTag)));
1561 emitLoad(property, regT1, regT2);
1562 addSlowCase(branch32(NotEqual, regT1, TrustedImm32(JSValue::Int32Tag)));
1563 add32(TrustedImm32(1), regT2);
1564 // regT2 now contains the integer index of the argument we want, including this
1565 emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT3);
1566 addSlowCase(branch32(AboveOrEqual, regT2, regT3));
1568 Jump skipOutofLineParams;
1569 int numArgs = m_codeBlock->m_numParameters;
1571 Jump notInInPlaceArgs = branch32(AboveOrEqual, regT2, Imm32(numArgs));
1572 addPtr(Imm32(static_cast<unsigned>(-(RegisterFile::CallFrameHeaderSize + numArgs) * sizeof(Register))), callFrameRegister, regT1);
1573 loadPtr(BaseIndex(regT1, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);
1574 loadPtr(BaseIndex(regT1, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
1575 skipOutofLineParams = jump();
1576 notInInPlaceArgs.link(this);
1579 addPtr(Imm32(static_cast<unsigned>(-(RegisterFile::CallFrameHeaderSize + numArgs) * sizeof(Register))), callFrameRegister, regT1);
1580 mul32(TrustedImm32(sizeof(Register)), regT3, regT3);
1581 subPtr(regT3, regT1);
1582 loadPtr(BaseIndex(regT1, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);
1583 loadPtr(BaseIndex(regT1, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
1585 skipOutofLineParams.link(this);
1586 emitStore(dst, regT1, regT0);
1589 void JIT::emitSlow_op_get_argument_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1591 unsigned dst = currentInstruction[1].u.operand;
1592 unsigned arguments = currentInstruction[2].u.operand;
1593 unsigned property = currentInstruction[3].u.operand;
1596 Jump skipArgumentsCreation = jump();
1600 if (m_codeBlock->m_numParameters == 1)
1601 JITStubCall(this, cti_op_create_arguments_no_params).call();
1603 JITStubCall(this, cti_op_create_arguments).call();
1605 emitStore(arguments, regT1, regT0);
1606 emitStore(unmodifiedArgumentsRegister(arguments), regT1, regT0);
1608 skipArgumentsCreation.link(this);
1609 JITStubCall stubCall(this, cti_op_get_by_val);
1610 stubCall.addArgument(arguments);
1611 stubCall.addArgument(property);
1615 #if ENABLE(JIT_USE_SOFT_MODULO)
// Software integer modulo trampoline for targets without a hardware divide
// (ENABLE(JIT_USE_SOFT_MODULO)). Computes regT0 % regT3 by shift-and-
// subtract: regT1 accumulates sign flags for dividend/divisor so the
// remainder's sign can be fixed up at the end; a power-of-two divisor is
// handled with a single mask. The inner reduction is emitted as raw ARM /
// Thumb-2 conditional instructions (subtract shifted divisor when it fits).
// NOTE(review): several lines of the original body (register pushes,
// neg32 fixup, epilogue) are not visible in this excerpt — confirm against
// the full file before modifying.
1616 void JIT::softModulo()
1622 move(TrustedImm32(0), regT1);
1624 // Check for negative result reminder
1625 Jump positiveRegT3 = branch32(GreaterThanOrEqual, regT3, TrustedImm32(0));
1627 xor32(TrustedImm32(1), regT1); // flag: divisor was negative
1628 positiveRegT3.link(this);
1630 Jump positiveRegT2 = branch32(GreaterThanOrEqual, regT2, TrustedImm32(0));
1632 xor32(TrustedImm32(2), regT1); // flag: dividend was negative
1633 positiveRegT2.link(this);
1635 // Save the condition for negative reminder
1638 Jump exitBranch = branch32(LessThan, regT2, regT3); // dividend < divisor: done
1640 // Power of two fast case
1642 sub32(TrustedImm32(1), regT0);
1643 Jump powerOfTwo = branchTest32(NonZero, regT0, regT3);
1644 and32(regT0, regT2); // remainder = dividend & (divisor - 1)
1645 powerOfTwo.link(this);
1647 and32(regT3, regT0);
1649 Jump exitBranch2 = branchTest32(Zero, regT0);
// Align divisor with dividend: shift by the difference of leading zeros.
1651 countLeadingZeros32(regT2, regT0);
1652 countLeadingZeros32(regT3, regT1);
1653 sub32(regT0, regT1);
1655 Jump useFullTable = branch32(Equal, regT1, TrustedImm32(31));
// Jump into the unrolled subtract ladder at the right rung.
1658 add32(TrustedImm32(31), regT1);
1660 int elementSizeByShift = -1;
1662 elementSizeByShift = 3;
1664 #error "JIT_USE_SOFT_MODULO not yet supported on this platform."
1666 relativeTableJump(regT1, elementSizeByShift);
1668 useFullTable.link(this);
// Unrolled: for each shift i, subtract (divisor << i) when it fits
// (conditional on carry-set / CS).
1670 for (int i = 31; i > 0; --i) {
1671 #if CPU(ARM_TRADITIONAL)
1672 m_assembler.cmp_r(regT2, m_assembler.lsl(regT3, i));
1673 m_assembler.sub_r(regT2, regT2, m_assembler.lsl(regT3, i), ARMAssembler::CS);
1674 #elif CPU(ARM_THUMB2)
1675 ShiftTypeAndAmount shift(SRType_LSL, i);
1676 m_assembler.sub_S(regT1, regT2, regT3, shift);
1677 m_assembler.it(ARMv7Assembler::ConditionCS);
1678 m_assembler.mov(regT2, regT1);
1680 #error "JIT_USE_SOFT_MODULO not yet supported on this platform."
// Final unshifted step.
1684 Jump lower = branch32(Below, regT2, regT3);
1685 sub32(regT3, regT2);
1688 exitBranch.link(this);
1689 exitBranch2.link(this);
1691 // Check for negative reminder
1693 Jump positiveResult = branch32(Equal, regT1, TrustedImm32(0));
1695 positiveResult.link(this);
1703 #endif // ENABLE(JIT_USE_SOFT_MODULO)
1707 #endif // USE(JSVALUE32_64)
1708 #endif // ENABLE(JIT)