/*
 * Copyright (C) 2009 Apple Inc. All rights reserved.
 * Copyright (C) 2010 Patrick Gansterer <paroga@paroga.com>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"

#if ENABLE(JIT)
#if USE(JSVALUE32_64)
#include "JIT.h"

#include "JITInlineMethods.h"
#include "JITStubCall.h"
#include "JSArray.h"
#include "JSCell.h"
#include "JSFunction.h"
#include "JSPropertyNameIterator.h"
#include "LinkBuffer.h"

namespace JSC {

PassRefPtr<ExecutableMemoryHandle> JIT::privateCompileCTIMachineTrampolines(JSGlobalData* globalData, TrampolineStructure* trampolines)
{
#if ENABLE(JIT_USE_SOFT_MODULO)
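    // softModulo() emits a software integer-modulo helper; this is assumed to be
    // for targets (e.g. older ARM cores) without a hardware divide instruction.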
    Label softModBegin = align();
    softModulo();
#endif
    // (1) This function provides fast property access for string length
    Label stringLengthBegin = align();

    // regT0 holds payload, regT1 holds tag
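    // In the JSVALUE32_64 representation a JSValue is two 32-bit words: a tag
    // (JSValue::CellTag, Int32Tag, BooleanTag, ...) and a payload. A value is a
    // cell pointer only when the tag word equals JSValue::CellTag, which is what
    // the first check below tests.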

    Jump string_failureCases1 = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));
    Jump string_failureCases2 = branchPtr(NotEqual, Address(regT0), TrustedImmPtr(m_globalData->jsStringVPtr));

    // Checks out okay! - get the length from the UString.
    load32(Address(regT0, OBJECT_OFFSETOF(JSString, m_length)), regT2);

    Jump string_failureCases3 = branch32(Above, regT2, TrustedImm32(INT_MAX));
    move(regT2, regT0);
    move(TrustedImm32(JSValue::Int32Tag), regT1);

    ret();

    JumpList callLinkFailures;
    // (2) Trampolines for the slow cases of op_call / op_call_eval / op_construct.
    // VirtualCallLink Trampoline
    // regT0 holds callee, regT1 holds argCount.  regT2 will hold the FunctionExecutable.
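    // The first call through an unlinked call site lands here: the lazy-link
    // stub compiles the callee if necessary and backpatches the call site, then
    // returns the entry point in regT0. A zero return signals failure and is
    // routed to the parse-failure handler below via callLinkFailures.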
    Label virtualCallLinkBegin = align();
    compileOpCallInitializeCallFrame();
    preserveReturnAddressAfterCall(regT3);
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);
    restoreArgumentReference();
    Call callLazyLinkCall = call();
    callLinkFailures.append(branchTestPtr(Zero, regT0));
    restoreReturnAddressBeforeReturn(regT3);
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT1);
    jump(regT0);

    // VirtualConstructLink Trampoline
    // regT0 holds callee, regT1 holds argCount.  regT2 will hold the FunctionExecutable.
    Label virtualConstructLinkBegin = align();
    compileOpCallInitializeCallFrame();
    preserveReturnAddressAfterCall(regT3);
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);
    restoreArgumentReference();
    Call callLazyLinkConstruct = call();
    restoreReturnAddressBeforeReturn(regT3);
    callLinkFailures.append(branchTestPtr(Zero, regT0));
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT1);
    jump(regT0);

    // VirtualCall Trampoline
    // regT0 holds callee, regT1 holds argCount.  regT2 will hold the FunctionExecutable.
    Label virtualCallBegin = align();
    compileOpCallInitializeCallFrame();

    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    Jump hasCodeBlock3 = branch32(GreaterThanOrEqual, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParametersForCall)), TrustedImm32(0));
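    // m_numParametersForCall is assumed to stay negative until the executable
    // has been compiled for calling, so a non-negative value lets us skip the
    // compilation stub entirely.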
    preserveReturnAddressAfterCall(regT3);
    restoreArgumentReference();
    Call callCompileCall = call();
    callLinkFailures.append(branchTestPtr(Zero, regT0));
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT1);
    restoreReturnAddressBeforeReturn(regT3);
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    hasCodeBlock3.link(this);

    loadPtr(Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_jitCodeForCallWithArityCheck)), regT0);
    jump(regT0);

    // VirtualConstruct Trampoline
    // regT0 holds callee, regT1 holds argCount.  regT2 will hold the FunctionExecutable.
    Label virtualConstructBegin = align();
    compileOpCallInitializeCallFrame();

    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    Jump hasCodeBlock4 = branch32(GreaterThanOrEqual, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParametersForConstruct)), TrustedImm32(0));
    preserveReturnAddressAfterCall(regT3);
    restoreArgumentReference();
    Call callCompileConstruct = call();
    callLinkFailures.append(branchTestPtr(Zero, regT0));
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT1);
    restoreReturnAddressBeforeReturn(regT3);
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    hasCodeBlock4.link(this);

    loadPtr(Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_jitCodeForConstructWithArityCheck)), regT0);
    jump(regT0);

    // If the parser fails we want to be able to keep going,
    // so we handle this as a parse failure.
    callLinkFailures.link(this);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);
    restoreReturnAddressBeforeReturn(regT1);
    move(TrustedImmPtr(&globalData->exceptionLocation), regT2);
    storePtr(regT1, regT2);
    poke(callFrameRegister, 1 + OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof(void*));
    poke(TrustedImmPtr(FunctionPtr(ctiVMThrowTrampoline).value()));
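    // The pokes above rewrite the JITStackFrame so that the following ret()
    // "returns" into ctiVMThrowTrampoline with the caller's frame restored,
    // turning the link failure into a thrown exception.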
    ret();

    // NativeCall Trampoline
    Label nativeCallThunk = privateCompileCTINativeCall(globalData);
    Label nativeConstructThunk = privateCompileCTINativeCall(globalData, true);

    Call string_failureCases1Call = makeTailRecursiveCall(string_failureCases1);
    Call string_failureCases2Call = makeTailRecursiveCall(string_failureCases2);
    Call string_failureCases3Call = makeTailRecursiveCall(string_failureCases3);

    // All trampolines constructed! Copy the code, link up calls, and set the pointers on the Machine object.
    LinkBuffer patchBuffer(*m_globalData, this);

    patchBuffer.link(string_failureCases1Call, FunctionPtr(cti_op_get_by_id_string_fail));
    patchBuffer.link(string_failureCases2Call, FunctionPtr(cti_op_get_by_id_string_fail));
    patchBuffer.link(string_failureCases3Call, FunctionPtr(cti_op_get_by_id_string_fail));
    patchBuffer.link(callLazyLinkCall, FunctionPtr(cti_vm_lazyLinkCall));
    patchBuffer.link(callLazyLinkConstruct, FunctionPtr(cti_vm_lazyLinkConstruct));
    patchBuffer.link(callCompileCall, FunctionPtr(cti_op_call_jitCompile));
    patchBuffer.link(callCompileConstruct, FunctionPtr(cti_op_construct_jitCompile));

    CodeRef finalCode = patchBuffer.finalizeCode();
    RefPtr<ExecutableMemoryHandle> executableMemory = finalCode.executableMemory();

    trampolines->ctiVirtualCall = patchBuffer.trampolineAt(virtualCallBegin);
    trampolines->ctiVirtualConstruct = patchBuffer.trampolineAt(virtualConstructBegin);
    trampolines->ctiNativeCall = patchBuffer.trampolineAt(nativeCallThunk);
    trampolines->ctiNativeConstruct = patchBuffer.trampolineAt(nativeConstructThunk);
    trampolines->ctiStringLengthTrampoline = patchBuffer.trampolineAt(stringLengthBegin);
    trampolines->ctiVirtualCallLink = patchBuffer.trampolineAt(virtualCallLinkBegin);
    trampolines->ctiVirtualConstructLink = patchBuffer.trampolineAt(virtualConstructLinkBegin);
#if ENABLE(JIT_USE_SOFT_MODULO)
    trampolines->ctiSoftModulo = patchBuffer.trampolineAt(softModBegin);
#endif

    return executableMemory.release();
}

JIT::Label JIT::privateCompileCTINativeCall(JSGlobalData* globalData, bool isConstruct)
{
    int executableOffsetToFunction = isConstruct ? OBJECT_OFFSETOF(NativeExecutable, m_constructor) : OBJECT_OFFSETOF(NativeExecutable, m_function);

    Label nativeCallThunk = align();

    emitPutImmediateToCallFrameHeader(0, RegisterFile::CodeBlock);

#if CPU(X86)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT0);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT0);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

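    // peek(regT1) reads the word at the top of the stack - the return address
    // pushed by the caller's call instruction - so it can be recorded in the
    // callframe header.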
    peek(regT1);
    emitPutToCallFrameHeader(regT1, RegisterFile::ReturnPC);

    // Calling convention:      f(ecx, edx, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, X86Registers::ecx);

    subPtr(TrustedImm32(16 - sizeof(void*)), stackPointerRegister); // Align stack after call.
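    // The call instruction will push a 4-byte return address, so dropping
    // (16 - sizeof(void*)) bytes here leaves the stack 16-byte aligned at the
    // callee's entry, as the x86 ABI on some platforms requires.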

    // call the function
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, regT1);
    loadPtr(Address(regT1, OBJECT_OFFSETOF(JSFunction, m_executable)), regT1);
    move(regT0, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    call(Address(regT1, executableOffsetToFunction));

    addPtr(TrustedImm32(16 - sizeof(void*)), stackPointerRegister);

#elif CPU(ARM)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT2);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    preserveReturnAddressAfterCall(regT3); // Callee preserved
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);

    // Calling convention:      f(r0 == regT0, r1 == regT1, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, ARMRegisters::r0);

    // call the function
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, ARMRegisters::r1);
    move(regT2, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    loadPtr(Address(ARMRegisters::r1, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    call(Address(regT2, executableOffsetToFunction));

    restoreReturnAddressBeforeReturn(regT3);
#elif CPU(SH4)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT2);
    emitPutToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    preserveReturnAddressAfterCall(regT3); // Callee preserved
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);

    // Calling convention: f(r0 == regT4, r1 == regT5, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, regT4);

    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, regT5);
    move(regT2, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    loadPtr(Address(regT5, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    call(Address(regT2, executableOffsetToFunction), regT0);
    restoreReturnAddressBeforeReturn(regT3);
#elif CPU(MIPS)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT0);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT0);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    preserveReturnAddressAfterCall(regT3); // Callee preserved
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);

    // Calling convention:      f(a0, a1, a2, a3);
    // Host function signature: f(ExecState*);

    // Allocate stack space for 16 bytes (8-byte aligned)
    // 16 bytes (unused) for 4 arguments
    subPtr(TrustedImm32(16), stackPointerRegister);

    // Setup arg0
    move(callFrameRegister, MIPSRegisters::a0);

    // Call
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, MIPSRegisters::a2);
    loadPtr(Address(MIPSRegisters::a2, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    move(regT0, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    call(Address(regT2, executableOffsetToFunction));

    // Restore stack space
    addPtr(TrustedImm32(16), stackPointerRegister);

    restoreReturnAddressBeforeReturn(regT3);

#else
#error "JIT not supported on this platform."
    UNUSED_PARAM(executableOffsetToFunction);
    breakpoint();
#endif // CPU(X86)

    // Check for an exception
    Jump sawException = branch32(NotEqual, AbsoluteAddress(reinterpret_cast<char*>(&globalData->exception) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), TrustedImm32(JSValue::EmptyValueTag));
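    // globalData->exception is a JSValue; its tag is EmptyValueTag while no
    // exception is pending, so any other tag means the host call threw.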

    // Return.
    ret();

    // Handle an exception
    sawException.link(this);

    // Grab the return address.
    preserveReturnAddressAfterCall(regT1);

    move(TrustedImmPtr(&globalData->exceptionLocation), regT2);
    storePtr(regT1, regT2);
    poke(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof(void*));

    // Set the return address.
    move(TrustedImmPtr(FunctionPtr(ctiVMThrowTrampoline).value()), regT1);
    restoreReturnAddressBeforeReturn(regT1);

    ret();

    return nativeCallThunk;
}

JIT::CodeRef JIT::privateCompileCTINativeCall(JSGlobalData* globalData, NativeFunction func)
{
    Call nativeCall;

    emitPutImmediateToCallFrameHeader(0, RegisterFile::CodeBlock);

#if CPU(X86)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT0);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT0);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    peek(regT1);
    emitPutToCallFrameHeader(regT1, RegisterFile::ReturnPC);

    // Calling convention:      f(ecx, edx, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, X86Registers::ecx);

    subPtr(TrustedImm32(16 - sizeof(void*)), stackPointerRegister); // Align stack after call.

    move(regT0, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.

    // call the function
    nativeCall = call();

    addPtr(TrustedImm32(16 - sizeof(void*)), stackPointerRegister);

#elif CPU(ARM)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT2);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    preserveReturnAddressAfterCall(regT3); // Callee preserved
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);

    // Calling convention:      f(r0 == regT0, r1 == regT1, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, ARMRegisters::r0);

    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, ARMRegisters::r1);
    move(regT2, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    loadPtr(Address(ARMRegisters::r1, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    // call the function
    nativeCall = call();

    restoreReturnAddressBeforeReturn(regT3);

#elif CPU(MIPS)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT0);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT0);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    preserveReturnAddressAfterCall(regT3); // Callee preserved
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);

    // Calling convention:      f(a0, a1, a2, a3);
    // Host function signature: f(ExecState*);

    // Allocate stack space for 16 bytes (8-byte aligned)
    // 16 bytes (unused) for 4 arguments
    subPtr(TrustedImm32(16), stackPointerRegister);

    // Setup arg0
    move(callFrameRegister, MIPSRegisters::a0);

    // Call
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, MIPSRegisters::a2);
    loadPtr(Address(MIPSRegisters::a2, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    move(regT0, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.

    // call the function
    nativeCall = call();

    // Restore stack space
    addPtr(TrustedImm32(16), stackPointerRegister);

    restoreReturnAddressBeforeReturn(regT3);
#elif CPU(SH4)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT2);
    emitPutToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    preserveReturnAddressAfterCall(regT3); // Callee preserved
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);

    // Calling convention: f(r0 == regT4, r1 == regT5, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, regT4);

    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, regT5);
    move(regT2, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    loadPtr(Address(regT5, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    // call the function
    nativeCall = call();

    restoreReturnAddressBeforeReturn(regT3);
#else
#error "JIT not supported on this platform."
    breakpoint();
#endif // CPU(X86)

    // Check for an exception
    Jump sawException = branch32(NotEqual, AbsoluteAddress(reinterpret_cast<char*>(&globalData->exception) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), TrustedImm32(JSValue::EmptyValueTag));

    // Return.
    ret();

    // Handle an exception
    sawException.link(this);

    // Grab the return address.
    preserveReturnAddressAfterCall(regT1);

    move(TrustedImmPtr(&globalData->exceptionLocation), regT2);
    storePtr(regT1, regT2);
    poke(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof(void*));

    // Set the return address.
    move(TrustedImmPtr(FunctionPtr(ctiVMThrowTrampoline).value()), regT1);
    restoreReturnAddressBeforeReturn(regT1);

    ret();

    // Thunk constructed! Copy the code, and link the call.
    LinkBuffer patchBuffer(*m_globalData, this);

    patchBuffer.link(nativeCall, FunctionPtr(func));
    return patchBuffer.finalizeCode();
}

void JIT::emit_op_mov(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    if (m_codeBlock->isConstantRegisterIndex(src))
        emitStore(dst, getConstantOperand(src));
    else {
        emitLoad(src, regT1, regT0);
        emitStore(dst, regT1, regT0);
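        // map() is assumed to record that regT1/regT0 currently cache dst at
        // the next bytecode offset, so the following opcode can skip a reload.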
        map(m_bytecodeOffset + OPCODE_LENGTH(op_mov), dst, regT1, regT0);
    }
}

void JIT::emit_op_end(Instruction* currentInstruction)
{
    ASSERT(returnValueRegister != callFrameRegister);
    emitLoad(currentInstruction[1].u.operand, regT1, regT0);
    restoreReturnAddressBeforeReturn(Address(callFrameRegister, RegisterFile::ReturnPC * static_cast<int>(sizeof(Register))));
    ret();
}

void JIT::emit_op_jmp(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[1].u.operand;
    addJump(jump(), target);
}

void JIT::emit_op_new_object(Instruction* currentInstruction)
{
    emitAllocateJSFinalObject(ImmPtr(m_codeBlock->globalObject()->emptyObjectStructure()), regT0, regT1);

    emitStoreCell(currentInstruction[1].u.operand, regT0);
}

void JIT::emitSlow_op_new_object(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall(this, cti_op_new_object).call(currentInstruction[1].u.operand);
}

void JIT::emit_op_check_has_instance(Instruction* currentInstruction)
{
    unsigned baseVal = currentInstruction[1].u.operand;

    emitLoadPayload(baseVal, regT0);

    // Check that baseVal is a cell.
    emitJumpSlowCaseIfNotJSCell(baseVal);

    // Check that baseVal 'ImplementsHasInstance'.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT0);
    addSlowCase(branchTest8(Zero, Address(regT0, Structure::typeInfoFlagsOffset()), TrustedImm32(ImplementsHasInstance)));
}

void JIT::emit_op_instanceof(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;
    unsigned baseVal = currentInstruction[3].u.operand;
    unsigned proto = currentInstruction[4].u.operand;

    // Load the operands into registers.
    // We use regT0 for baseVal since we will be done with this first, and we can then use it for the result.
    emitLoadPayload(value, regT2);
    emitLoadPayload(baseVal, regT0);
    emitLoadPayload(proto, regT1);

    // Check that value and proto are cells.  baseVal must be a cell - this is checked by op_check_has_instance.
    emitJumpSlowCaseIfNotJSCell(value);
    emitJumpSlowCaseIfNotJSCell(proto);

    // Check that prototype is an object
    loadPtr(Address(regT1, JSCell::structureOffset()), regT3);
    addSlowCase(emitJumpIfNotObject(regT3));

    // FIXME: this check is only needed because the JSC API allows HasInstance to be overridden; we should deprecate this.
    // Check that baseVal 'ImplementsDefaultHasInstance'.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT0);
    addSlowCase(branchTest8(Zero, Address(regT0, Structure::typeInfoFlagsOffset()), TrustedImm32(ImplementsDefaultHasInstance)));

    // Optimistically load the result true, and start looping.
    // Initially, regT1 still contains proto and regT2 still contains value.
    // As we loop regT2 will be updated with its prototype, recursively walking the prototype chain.
    move(TrustedImm32(1), regT0);
    Label loop(this);

    // Load the prototype of the cell in regT2.  If this is equal to regT1 - WIN!
    // Otherwise, check if we've hit null - if we have then drop out of the loop, if not go again.
    loadPtr(Address(regT2, JSCell::structureOffset()), regT2);
    load32(Address(regT2, Structure::prototypeOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT2);
    Jump isInstance = branchPtr(Equal, regT2, regT1);
    branchTest32(NonZero, regT2).linkTo(loop, this);

    // We get here either by dropping out of the loop, or if value was not an Object.  Result is false.
    move(TrustedImm32(0), regT0);

    // isInstance jumps right down to here, to skip setting the result to false (it has already set true).
    isInstance.link(this);
    emitStoreBool(dst, regT0);
}

void JIT::emitSlow_op_check_has_instance(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned baseVal = currentInstruction[1].u.operand;

    linkSlowCaseIfNotJSCell(iter, baseVal);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_check_has_instance);
    stubCall.addArgument(baseVal);
    stubCall.call();
}

void JIT::emitSlow_op_instanceof(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;
    unsigned baseVal = currentInstruction[3].u.operand;
    unsigned proto = currentInstruction[4].u.operand;

    linkSlowCaseIfNotJSCell(iter, value);
    linkSlowCaseIfNotJSCell(iter, proto);
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_instanceof);
    stubCall.addArgument(value);
    stubCall.addArgument(baseVal);
    stubCall.addArgument(proto);
    stubCall.call(dst);
}

void JIT::emit_op_tear_off_activation(Instruction* currentInstruction)
{
    unsigned activation = currentInstruction[1].u.operand;
    unsigned arguments = currentInstruction[2].u.operand;
    Jump activationCreated = branch32(NotEqual, tagFor(activation), TrustedImm32(JSValue::EmptyValueTag));
    Jump argumentsNotCreated = branch32(Equal, tagFor(arguments), TrustedImm32(JSValue::EmptyValueTag));
    activationCreated.link(this);
    JITStubCall stubCall(this, cti_op_tear_off_activation);
    stubCall.addArgument(currentInstruction[1].u.operand);
    stubCall.addArgument(unmodifiedArgumentsRegister(currentInstruction[2].u.operand));
    stubCall.call();
    argumentsNotCreated.link(this);
}

void JIT::emit_op_tear_off_arguments(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;

    Jump argsNotCreated = branch32(Equal, tagFor(unmodifiedArgumentsRegister(dst)), TrustedImm32(JSValue::EmptyValueTag));
    JITStubCall stubCall(this, cti_op_tear_off_arguments);
    stubCall.addArgument(unmodifiedArgumentsRegister(dst));
    stubCall.call();
    argsNotCreated.link(this);
}

void JIT::emit_op_resolve(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_to_primitive(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImm = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));
    addSlowCase(branchPtr(NotEqual, Address(regT0), TrustedImmPtr(m_globalData->jsStringVPtr)));
    isImm.link(this);

    if (dst != src)
        emitStore(dst, regT1, regT0);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_to_primitive), dst, regT1, regT0);
}

void JIT::emitSlow_op_to_primitive(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;

    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_to_primitive);
    stubCall.addArgument(regT1, regT0);
    stubCall.call(dst);
}

void JIT::emit_op_strcat(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_strcat);
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve_base(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, currentInstruction[3].u.operand ? cti_op_resolve_base_strict_put : cti_op_resolve_base);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_ensure_property_exists(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_ensure_property_exists);
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve_skip(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve_skip);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve_global(Instruction* currentInstruction, bool dynamic)
{
    // FIXME: Optimize to use patching instead of so many memory accesses.

    unsigned dst = currentInstruction[1].u.operand;
    void* globalObject = m_codeBlock->globalObject();

    unsigned currentIndex = m_globalResolveInfoIndex++;
    GlobalResolveInfo* resolveInfoAddress = &m_codeBlock->globalResolveInfo(currentIndex);
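    // GlobalResolveInfo caches the global object's Structure and the property's
    // storage offset. If the structure still matches, the slot can be loaded
    // directly; TimesEight scales the index because each JSValue occupies two
    // 32-bit words.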

    // Verify structure.
    move(TrustedImmPtr(globalObject), regT0);
    move(TrustedImmPtr(resolveInfoAddress), regT3);
    loadPtr(Address(regT3, OBJECT_OFFSETOF(GlobalResolveInfo, structure)), regT1);
    addSlowCase(branchPtr(NotEqual, regT1, Address(regT0, JSCell::structureOffset())));

    // Load property.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSGlobalObject, m_propertyStorage)), regT2);
    load32(Address(regT3, OBJECT_OFFSETOF(GlobalResolveInfo, offset)), regT3);
    load32(BaseIndex(regT2, regT3, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0); // payload
    load32(BaseIndex(regT2, regT3, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1); // tag
    emitStore(dst, regT1, regT0);
    map(m_bytecodeOffset + (dynamic ? OPCODE_LENGTH(op_resolve_global_dynamic) : OPCODE_LENGTH(op_resolve_global)), dst, regT1, regT0);
}

void JIT::emitSlow_op_resolve_global(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    Identifier* ident = &m_codeBlock->identifier(currentInstruction[2].u.operand);

    unsigned currentIndex = m_globalResolveInfoIndex++;

    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_resolve_global);
    stubCall.addArgument(TrustedImmPtr(ident));
    stubCall.addArgument(Imm32(currentIndex));
    stubCall.call(dst);
}

void JIT::emit_op_not(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    emitLoadTag(src, regT0);

    emitLoad(src, regT1, regT0);
    addSlowCase(branch32(NotEqual, regT1, TrustedImm32(JSValue::BooleanTag)));
    xor32(TrustedImm32(1), regT0);

    emitStoreBool(dst, regT0, (dst == src));
}

void JIT::emitSlow_op_not(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_not);
    stubCall.addArgument(src);
    stubCall.call(dst);
}

void JIT::emit_op_jfalse(Instruction* currentInstruction)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(cond, regT1, regT0);

    ASSERT((JSValue::BooleanTag + 1 == JSValue::Int32Tag) && !(JSValue::Int32Tag + 1));
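    // Per the assertion, Int32Tag is -1 and BooleanTag is -2, so a single
    // unsigned compare against BooleanTag accepts exactly the boolean and int32
    // cases and sends every other tag to the slow path.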
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::BooleanTag)));
    addJump(branchTest32(Zero, regT0), target);
}

void JIT::emitSlow_op_jfalse(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    linkSlowCase(iter);

    if (supportsFloatingPoint()) {
        // regT1 contains the tag from the hot path.
        Jump notNumber = branch32(Above, regT1, Imm32(JSValue::LowestTag));

        emitLoadDouble(cond, fpRegT0);
        emitJumpSlowToHot(branchDoubleZeroOrNaN(fpRegT0, fpRegT1), target);
        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_jfalse));

        notNumber.link(this);
    }

    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(cond);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(Zero, regT0), target); // Inverted.
}

void JIT::emit_op_jtrue(Instruction* currentInstruction)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(cond, regT1, regT0);

    ASSERT((JSValue::BooleanTag + 1 == JSValue::Int32Tag) && !(JSValue::Int32Tag + 1));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::BooleanTag)));
    addJump(branchTest32(NonZero, regT0), target);
}

void JIT::emitSlow_op_jtrue(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    linkSlowCase(iter);

    if (supportsFloatingPoint()) {
        // regT1 contains the tag from the hot path.
        Jump notNumber = branch32(Above, regT1, Imm32(JSValue::LowestTag));

        emitLoadDouble(cond, fpRegT0);
        emitJumpSlowToHot(branchDoubleNonZero(fpRegT0, fpRegT1), target);
        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_jtrue));

        notNumber.link(this);
    }

    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(cond);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(NonZero, regT0), target);
}

void JIT::emit_op_jeq_null(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    addJump(branchTest8(NonZero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined)), target);

    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);

    ASSERT((JSValue::UndefinedTag + 1 == JSValue::NullTag) && (JSValue::NullTag & 0x1));
    or32(TrustedImm32(1), regT1);
    addJump(branch32(Equal, regT1, TrustedImm32(JSValue::NullTag)), target);

    wasNotImmediate.link(this);
}

void JIT::emit_op_jneq_null(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    addJump(branchTest8(Zero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined)), target);

    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);

    ASSERT((JSValue::UndefinedTag + 1 == JSValue::NullTag) && (JSValue::NullTag & 0x1));
    or32(TrustedImm32(1), regT1);
    addJump(branch32(NotEqual, regT1, TrustedImm32(JSValue::NullTag)), target);

    wasNotImmediate.link(this);
}

void JIT::emit_op_jneq_ptr(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    JSCell* ptr = currentInstruction[2].u.jsCell.get();
    unsigned target = currentInstruction[3].u.operand;

    emitLoad(src, regT1, regT0);
    addJump(branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag)), target);
    addJump(branchPtr(NotEqual, regT0, TrustedImmPtr(ptr)), target);
}

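// op_jsr and op_sret implement the bytecode's subroutine mechanism (historically
// used for finally blocks): jsr plants a return address - filled in at link time
// via m_jsrSites - in a register-file slot and jumps; sret jumps back through it.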
void JIT::emit_op_jsr(Instruction* currentInstruction)
{
    int retAddrDst = currentInstruction[1].u.operand;
    int target = currentInstruction[2].u.operand;
    DataLabelPtr storeLocation = storePtrWithPatch(TrustedImmPtr(0), Address(callFrameRegister, sizeof(Register) * retAddrDst));
    addJump(jump(), target);
    m_jsrSites.append(JSRInfo(storeLocation, label()));
}

void JIT::emit_op_sret(Instruction* currentInstruction)
{
    jump(Address(callFrameRegister, sizeof(Register) * currentInstruction[1].u.operand));
}

void JIT::emit_op_eq(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitLoad2(src1, regT1, regT0, src2, regT3, regT2);
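    // Fast path only when the tags match and the value is neither a cell nor a
    // double; the three addSlowCase checks below pair up, in order, with the
    // getSlowCase/linkSlowCase sequence in emitSlow_op_eq.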
    addSlowCase(branch32(NotEqual, regT1, regT3));
    addSlowCase(branch32(Equal, regT1, TrustedImm32(JSValue::CellTag)));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::LowestTag)));

    compare32(Equal, regT0, regT2, regT0);

    emitStoreBool(dst, regT0);
}

void JIT::emitSlow_op_eq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned op1 = currentInstruction[2].u.operand;
    unsigned op2 = currentInstruction[3].u.operand;

    JumpList storeResult;
    JumpList genericCase;

    genericCase.append(getSlowCase(iter)); // tags not equal

    linkSlowCase(iter); // tags equal and JSCell
    genericCase.append(branchPtr(NotEqual, Address(regT0), TrustedImmPtr(m_globalData->jsStringVPtr)));
    genericCase.append(branchPtr(NotEqual, Address(regT2), TrustedImmPtr(m_globalData->jsStringVPtr)));

    // String case.
    JITStubCall stubCallEqStrings(this, cti_op_eq_strings);
    stubCallEqStrings.addArgument(regT0);
    stubCallEqStrings.addArgument(regT2);
    stubCallEqStrings.call();
    storeResult.append(jump());

    // Generic case.
    genericCase.append(getSlowCase(iter)); // doubles
    genericCase.link(this);
    JITStubCall stubCallEq(this, cti_op_eq);
    stubCallEq.addArgument(op1);
    stubCallEq.addArgument(op2);
    stubCallEq.call(regT0);

    storeResult.link(this);
    emitStoreBool(dst, regT0);
}

void JIT::emit_op_neq(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitLoad2(src1, regT1, regT0, src2, regT3, regT2);
    addSlowCase(branch32(NotEqual, regT1, regT3));
    addSlowCase(branch32(Equal, regT1, TrustedImm32(JSValue::CellTag)));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::LowestTag)));

    compare32(NotEqual, regT0, regT2, regT0);

    emitStoreBool(dst, regT0);
}

void JIT::emitSlow_op_neq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;

    JumpList storeResult;
    JumpList genericCase;

    genericCase.append(getSlowCase(iter)); // tags not equal

    linkSlowCase(iter); // tags equal and JSCell
    genericCase.append(branchPtr(NotEqual, Address(regT0), TrustedImmPtr(m_globalData->jsStringVPtr)));
    genericCase.append(branchPtr(NotEqual, Address(regT2), TrustedImmPtr(m_globalData->jsStringVPtr)));

    // String case.
    JITStubCall stubCallEqStrings(this, cti_op_eq_strings);
    stubCallEqStrings.addArgument(regT0);
    stubCallEqStrings.addArgument(regT2);
    stubCallEqStrings.call(regT0);
    storeResult.append(jump());

    // Generic case.
    genericCase.append(getSlowCase(iter)); // doubles
    genericCase.link(this);
    JITStubCall stubCallEq(this, cti_op_eq);
    stubCallEq.addArgument(regT1, regT0);
    stubCallEq.addArgument(regT3, regT2);
    stubCallEq.call(regT0);

    storeResult.link(this);
    xor32(TrustedImm32(0x1), regT0);
    emitStoreBool(dst, regT0);
}

void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitLoad2(src1, regT1, regT0, src2, regT3, regT2);

    // Bail if the tags differ, or are double.
    addSlowCase(branch32(NotEqual, regT1, regT3));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::LowestTag)));

    // Jump to a slow case if both are strings.
    Jump notCell = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));
    Jump firstNotString = branchPtr(NotEqual, Address(regT0), TrustedImmPtr(m_globalData->jsStringVPtr));
    addSlowCase(branchPtr(Equal, Address(regT2), TrustedImmPtr(m_globalData->jsStringVPtr)));
    notCell.link(this);
    firstNotString.link(this);

    // Simply compare the payloads.
    if (type == OpStrictEq)
        compare32(Equal, regT0, regT2, regT0);
    else
        compare32(NotEqual, regT0, regT2, regT0);

    emitStoreBool(dst, regT0);
}

void JIT::emit_op_stricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpStrictEq);
}

void JIT::emitSlow_op_stricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_stricteq);
    stubCall.addArgument(src1);
    stubCall.addArgument(src2);
    stubCall.call(dst);
}

void JIT::emit_op_nstricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpNStrictEq);
}

void JIT::emitSlow_op_nstricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_nstricteq);
    stubCall.addArgument(src1);
    stubCall.addArgument(src2);
    stubCall.call(dst);
}

void JIT::emit_op_eq_null(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);
    Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    loadPtr(Address(regT0, JSCell::structureOffset()), regT1);
    test8(NonZero, Address(regT1, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined), regT1);

    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    compare32(Equal, regT1, TrustedImm32(JSValue::NullTag), regT2);
    compare32(Equal, regT1, TrustedImm32(JSValue::UndefinedTag), regT1);
    or32(regT2, regT1);

    wasNotImmediate.link(this);

    emitStoreBool(dst, regT1);
}

void JIT::emit_op_neq_null(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);
    Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    loadPtr(Address(regT0, JSCell::structureOffset()), regT1);
    test8(Zero, Address(regT1, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined), regT1);

    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    compare32(NotEqual, regT1, TrustedImm32(JSValue::NullTag), regT2);
    compare32(NotEqual, regT1, TrustedImm32(JSValue::UndefinedTag), regT1);
    and32(regT2, regT1);

    wasNotImmediate.link(this);

    emitStoreBool(dst, regT1);
}

void JIT::emit_op_resolve_with_base(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve_with_base);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[3].u.operand)));
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.call(currentInstruction[2].u.operand);
}

void JIT::emit_op_resolve_with_this(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve_with_this);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[3].u.operand)));
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.call(currentInstruction[2].u.operand);
}

void JIT::emit_op_throw(Instruction* currentInstruction)
{
    unsigned exception = currentInstruction[1].u.operand;
    JITStubCall stubCall(this, cti_op_throw);
    stubCall.addArgument(exception);
    stubCall.call();

#ifndef NDEBUG
    // cti_op_throw always changes its return address,
    // so this point in the code should never be reached.
    breakpoint();
#endif
}

void JIT::emit_op_get_pnames(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int i = currentInstruction[3].u.operand;
    int size = currentInstruction[4].u.operand;
    int breakTarget = currentInstruction[5].u.operand;

    JumpList isNotObject;

    emitLoad(base, regT1, regT0);
    if (!m_codeBlock->isKnownNotImmediate(base))
        isNotObject.append(branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag)));
    if (base != m_codeBlock->thisRegister() || m_codeBlock->isStrictMode()) {
        loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
        isNotObject.append(emitJumpIfNotObject(regT2));
    }

    // We could inline the case where you have a valid cache, but
    // this call doesn't seem to be hot.
    Label isObject(this);
    JITStubCall getPnamesStubCall(this, cti_op_get_pnames);
    getPnamesStubCall.addArgument(regT0);
    getPnamesStubCall.call(dst);
    load32(Address(regT0, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStringsSize)), regT3);
    store32(TrustedImm32(Int32Tag), intTagFor(i));
    store32(TrustedImm32(0), intPayloadFor(i));
    store32(TrustedImm32(Int32Tag), intTagFor(size));
    store32(regT3, payloadFor(size));
    Jump end = jump();

    isNotObject.link(this);
    addJump(branch32(Equal, regT1, TrustedImm32(JSValue::NullTag)), breakTarget);
    addJump(branch32(Equal, regT1, TrustedImm32(JSValue::UndefinedTag)), breakTarget);
    JITStubCall toObjectStubCall(this, cti_to_object);
    toObjectStubCall.addArgument(regT1, regT0);
    toObjectStubCall.call(base);
    jump().linkTo(isObject, this);

    end.link(this);
}

void JIT::emit_op_next_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int i = currentInstruction[3].u.operand;
    int size = currentInstruction[4].u.operand;
    int it = currentInstruction[5].u.operand;
    int target = currentInstruction[6].u.operand;

    JumpList callHasProperty;

    Label begin(this);
    load32(intPayloadFor(i), regT0);
    Jump end = branch32(Equal, regT0, intPayloadFor(size));

    // Grab key @ i
    loadPtr(payloadFor(it), regT1);
    loadPtr(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStrings)), regT2);
    load32(BaseIndex(regT2, regT0, TimesEight), regT2);
    store32(TrustedImm32(JSValue::CellTag), tagFor(dst));
    store32(regT2, payloadFor(dst));

    // Increment i
    add32(TrustedImm32(1), regT0);
    store32(regT0, intPayloadFor(i));

    // Verify that i is valid:
    loadPtr(payloadFor(base), regT0);

    // Test base's structure
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    callHasProperty.append(branchPtr(NotEqual, regT2, Address(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedStructure)))));

    // Test base's prototype chain
    loadPtr(Address(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedPrototypeChain))), regT3);
    loadPtr(Address(regT3, OBJECT_OFFSETOF(StructureChain, m_vector)), regT3);
    addJump(branchTestPtr(Zero, Address(regT3)), target);

    Label checkPrototype(this);
    callHasProperty.append(branch32(Equal, Address(regT2, Structure::prototypeOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), TrustedImm32(JSValue::NullTag)));
    loadPtr(Address(regT2, Structure::prototypeOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT2);
    loadPtr(Address(regT2, JSCell::structureOffset()), regT2);
    callHasProperty.append(branchPtr(NotEqual, regT2, Address(regT3)));
    addPtr(TrustedImm32(sizeof(Structure*)), regT3);
    branchTestPtr(NonZero, Address(regT3)).linkTo(checkPrototype, this);

    // Continue loop.
    addJump(jump(), target);

    // Slow case: Ask the object if i is valid.
    callHasProperty.link(this);
    loadPtr(addressFor(dst), regT1);
    JITStubCall stubCall(this, cti_has_property);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call();

    // Test for valid key.
    addJump(branchTest32(NonZero, regT0), target);
    jump().linkTo(begin, this);

    // End of loop.
    end.link(this);
}

void JIT::emit_op_push_scope(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_push_scope);
    stubCall.addArgument(currentInstruction[1].u.operand);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_pop_scope(Instruction*)
{
    JITStubCall(this, cti_op_pop_scope).call();
}

void JIT::emit_op_to_jsnumber(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isInt32 = branch32(Equal, regT1, TrustedImm32(JSValue::Int32Tag));
    addSlowCase(branch32(AboveOrEqual, regT1, TrustedImm32(JSValue::EmptyValueTag)));
    isInt32.link(this);

    if (src != dst)
        emitStore(dst, regT1, regT0);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_to_jsnumber), dst, regT1, regT0);
}

void JIT::emitSlow_op_to_jsnumber(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;

    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_to_jsnumber);
    stubCall.addArgument(regT1, regT0);
    stubCall.call(dst);
}

void JIT::emit_op_push_new_scope(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_push_new_scope);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.addArgument(currentInstruction[3].u.operand);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_catch(Instruction* currentInstruction)
{
    // cti_op_throw returns the callFrame for the handler.
    move(regT0, callFrameRegister);

    // Now store the exception returned by cti_op_throw.
    loadPtr(Address(stackPointerRegister, OBJECT_OFFSETOF(struct JITStackFrame, globalData)), regT3);
    load32(Address(regT3, OBJECT_OFFSETOF(JSGlobalData, exception) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);
    load32(Address(regT3, OBJECT_OFFSETOF(JSGlobalData, exception) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
    store32(TrustedImm32(JSValue().payload()), Address(regT3, OBJECT_OFFSETOF(JSGlobalData, exception) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)));
    store32(TrustedImm32(JSValue().tag()), Address(regT3, OBJECT_OFFSETOF(JSGlobalData, exception) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)));

    unsigned exception = currentInstruction[1].u.operand;
    emitStore(exception, regT1, regT0);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_catch), exception, regT1, regT0);
}

void JIT::emit_op_jmp_scopes(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_jmp_scopes);
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.call();
    addJump(jump(), currentInstruction[2].u.operand);
}

void JIT::emit_op_switch_imm(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->immediateSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Immediate));
    jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());
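    // The ctiOffsets table is only sized here; its entries are assumed to be
    // filled in with machine-code addresses when this code block's LinkBuffer
    // is finalized, via the SwitchRecord appended above.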
1329
1330     JITStubCall stubCall(this, cti_op_switch_imm);
1331     stubCall.addArgument(scrutinee);
1332     stubCall.addArgument(Imm32(tableIndex));
1333     stubCall.call();
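    // The stub resolves the scrutinee against the jump table and returns the
    // native code address of the matching case (or the default) in regT0.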
    jump(regT0);
}

void JIT::emit_op_switch_char(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // Create the jump table for the switch destinations and track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->characterSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Character));
    jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());

    JITStubCall stubCall(this, cti_op_switch_char);
    stubCall.addArgument(scrutinee);
    stubCall.addArgument(Imm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

void JIT::emit_op_switch_string(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // Create the jump table for the switch destinations and track this switch statement.
    StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset));
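    // A StringJumpTable keeps its code offset per string entry, so unlike the
    // immediate/character tables there is no ctiOffsets array to pre-size.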

    JITStubCall stubCall(this, cti_op_switch_string);
    stubCall.addArgument(scrutinee);
    stubCall.addArgument(Imm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

void JIT::emit_op_throw_reference_error(Instruction* currentInstruction)
{
    unsigned message = currentInstruction[1].u.operand;

    JITStubCall stubCall(this, cti_op_throw_reference_error);
    stubCall.addArgument(m_codeBlock->getConstant(message));
    stubCall.call();
}

void JIT::emit_op_debug(Instruction* currentInstruction)
{
#if ENABLE(DEBUG_WITH_BREAKPOINT)
    UNUSED_PARAM(currentInstruction);
    breakpoint();
#else
    JITStubCall stubCall(this, cti_op_debug);
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.call();
#endif
}

void JIT::emit_op_enter(Instruction*)
{
    // Even though JIT code doesn't use them, we initialize our constant
    // registers to zap stale pointers, to avoid unnecessarily prolonging
    // object lifetime and increasing GC pressure.
    for (int i = 0; i < m_codeBlock->m_numVars; ++i)
        emitStore(i, jsUndefined());
}

void JIT::emit_op_create_activation(Instruction* currentInstruction)
{
    unsigned activation = currentInstruction[1].u.operand;

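    // Activations (like the arguments object below) are created lazily; the
    // empty-value tag marks a register whose object has not been materialized.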
    Jump activationCreated = branch32(NotEqual, tagFor(activation), TrustedImm32(JSValue::EmptyValueTag));
    JITStubCall(this, cti_op_push_activation).call(activation);
    activationCreated.link(this);
}

void JIT::emit_op_create_arguments(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;

    Jump argsCreated = branch32(NotEqual, tagFor(dst), TrustedImm32(JSValue::EmptyValueTag));

    if (m_codeBlock->m_numParameters == 1)
        JITStubCall(this, cti_op_create_arguments_no_params).call();
    else
        JITStubCall(this, cti_op_create_arguments).call();

    emitStore(dst, regT1, regT0);
    emitStore(unmodifiedArgumentsRegister(dst), regT1, regT0);

    argsCreated.link(this);
}

void JIT::emit_op_init_lazy_reg(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;

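    // Store the empty value as the "not yet materialized" sentinel that
    // op_create_activation and op_create_arguments test for above.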
    emitStore(dst, JSValue());
}

void JIT::emit_op_get_callee(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, regT0);
    emitStoreCell(dst, regT0);
}

void JIT::emit_op_create_this(Instruction* currentInstruction)
{
    emitLoad(currentInstruction[2].u.operand, regT1, regT0);
    emitJumpSlowCaseIfNotJSCell(currentInstruction[2].u.operand, regT1);
    loadPtr(Address(regT0, JSCell::structureOffset()), regT1);
    addSlowCase(emitJumpIfNotObject(regT1));

    // Now we know that the prototype is an object, but not whether it has an
    // inheritor ID.

    loadPtr(Address(regT0, JSObject::offsetOfInheritorID()), regT2);
    addSlowCase(branchTestPtr(Zero, regT2));

    // Now regT2 contains the inheritorID, which is the structure that the
    // newly allocated object will have.

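    // Inline-allocate a JSFinalObject with that structure; if the allocation
    // fails we fall to the slow path (linked as "allocation failed" below).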
    emitAllocateJSFinalObject(regT2, regT0, regT1);

    emitStoreCell(currentInstruction[1].u.operand, regT0);
}

void JIT::emitSlow_op_create_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCaseIfNotJSCell(iter, currentInstruction[2].u.operand); // not a cell
    linkSlowCase(iter); // not an object
    linkSlowCase(iter); // doesn't have an inheritor ID
    linkSlowCase(iter); // allocation failed
    unsigned protoRegister = currentInstruction[2].u.operand;
    emitLoad(protoRegister, regT1, regT0);
    JITStubCall stubCall(this, cti_op_create_this);
    stubCall.addArgument(regT1, regT0);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_convert_this(Instruction* currentInstruction)
{
    unsigned thisRegister = currentInstruction[1].u.operand;

    emitLoad(thisRegister, regT1, regT0);

    addSlowCase(branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag)));
    addSlowCase(branchPtr(Equal, Address(regT0), TrustedImmPtr(m_globalData->jsStringVPtr)));

    map(m_bytecodeOffset + OPCODE_LENGTH(op_convert_this), thisRegister, regT1, regT0);
}

void JIT::emitSlow_op_convert_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    void* globalThis = m_codeBlock->globalObject()->globalScopeChain()->globalThis.get();
    unsigned thisRegister = currentInstruction[1].u.operand;

    linkSlowCase(iter);
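    // An undefined |this| is replaced directly with the cached global |this|
    // object; anything else (including null) is converted by the stub below.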
    Jump isNotUndefined = branch32(NotEqual, regT1, TrustedImm32(JSValue::UndefinedTag));
    move(TrustedImmPtr(globalThis), regT0);
    move(TrustedImm32(JSValue::CellTag), regT1);
    emitStore(thisRegister, regT1, regT0);
    emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_convert_this));

    isNotUndefined.link(this);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_convert_this);
    stubCall.addArgument(regT1, regT0);
    stubCall.call(thisRegister);
}

void JIT::emit_op_profile_will_call(Instruction* currentInstruction)
{
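    // The JIT stack frame stashes a pointer to the enabled-profiler reference;
    // when no profiler is installed we skip the stub call entirely.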
    peek(regT2, OBJECT_OFFSETOF(JITStackFrame, enabledProfilerReference) / sizeof(void*));
    Jump noProfiler = branchTestPtr(Zero, Address(regT2));

    JITStubCall stubCall(this, cti_op_profile_will_call);
    stubCall.addArgument(currentInstruction[1].u.operand);
    stubCall.call();
    noProfiler.link(this);
}

void JIT::emit_op_profile_did_call(Instruction* currentInstruction)
{
    peek(regT2, OBJECT_OFFSETOF(JITStackFrame, enabledProfilerReference) / sizeof(void*));
    Jump noProfiler = branchTestPtr(Zero, Address(regT2));

    JITStubCall stubCall(this, cti_op_profile_did_call);
    stubCall.addArgument(currentInstruction[1].u.operand);
    stubCall.call();
    noProfiler.link(this);
}

void JIT::emit_op_get_arguments_length(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int argumentsRegister = currentInstruction[2].u.operand;
    addSlowCase(branch32(NotEqual, tagFor(argumentsRegister), TrustedImm32(JSValue::EmptyValueTag)));
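    // ArgumentCount in the call frame header includes |this|, so the visible
    // arguments.length is one less.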
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT0);
    sub32(TrustedImm32(1), regT0);
    emitStoreInt32(dst, regT0);
}

void JIT::emitSlow_op_get_arguments_length(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int ident = currentInstruction[3].u.operand;

    JITStubCall stubCall(this, cti_op_get_by_id_generic);
    stubCall.addArgument(base);
    stubCall.addArgument(TrustedImmPtr(&(m_codeBlock->identifier(ident))));
    stubCall.call(dst);
}

void JIT::emit_op_get_argument_by_val(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int argumentsRegister = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    addSlowCase(branch32(NotEqual, tagFor(argumentsRegister), TrustedImm32(JSValue::EmptyValueTag)));
    emitLoad(property, regT1, regT2);
    addSlowCase(branch32(NotEqual, regT1, TrustedImm32(JSValue::Int32Tag)));
    add32(TrustedImm32(1), regT2);
    // regT2 now contains the integer index of the argument we want, biased by one to account for |this|.
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT3);
    addSlowCase(branch32(AboveOrEqual, regT2, regT3));

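    // Arguments live in the register file immediately below the call frame
    // header. Declared parameters sit "in place" at a fixed offset from the
    // frame; for indices past them the base is additionally rebased by the
    // actual argument count (regT3). Each Register is an 8-byte JSValue in
    // this encoding, hence the TimesEight scaling.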
    Jump skipOutofLineParams;
    int numArgs = m_codeBlock->m_numParameters;
    if (numArgs) {
        Jump notInInPlaceArgs = branch32(AboveOrEqual, regT2, Imm32(numArgs));
        addPtr(Imm32(static_cast<unsigned>(-(RegisterFile::CallFrameHeaderSize + numArgs) * sizeof(Register))), callFrameRegister, regT1);
        loadPtr(BaseIndex(regT1, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);
        loadPtr(BaseIndex(regT1, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
        skipOutofLineParams = jump();
        notInInPlaceArgs.link(this);
    }

    addPtr(Imm32(static_cast<unsigned>(-(RegisterFile::CallFrameHeaderSize + numArgs) * sizeof(Register))), callFrameRegister, regT1);
    mul32(TrustedImm32(sizeof(Register)), regT3, regT3);
    subPtr(regT3, regT1);
    loadPtr(BaseIndex(regT1, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);
    loadPtr(BaseIndex(regT1, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
    if (numArgs)
        skipOutofLineParams.link(this);
    emitStore(dst, regT1, regT0);
}

void JIT::emitSlow_op_get_argument_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned arguments = currentInstruction[2].u.operand;
    unsigned property = currentInstruction[3].u.operand;

    linkSlowCase(iter);
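    // First slow case: the arguments object already exists, so skip creation
    // and go straight to a generic get_by_val on it.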
    Jump skipArgumentsCreation = jump();

    linkSlowCase(iter);
    linkSlowCase(iter);
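    // Remaining slow cases: the index was not an int32 or was out of range.
    // Materialize the arguments object, then fall through to the generic path.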
    if (m_codeBlock->m_numParameters == 1)
        JITStubCall(this, cti_op_create_arguments_no_params).call();
    else
        JITStubCall(this, cti_op_create_arguments).call();

    emitStore(arguments, regT1, regT0);
    emitStore(unmodifiedArgumentsRegister(arguments), regT1, regT0);

    skipArgumentsCreation.link(this);
    JITStubCall stubCall(this, cti_op_get_by_val);
    stubCall.addArgument(arguments);
    stubCall.addArgument(property);
    stubCall.call(dst);
}

#if ENABLE(JIT_USE_SOFT_MODULO)
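// Software modulo trampoline for targets without a hardware integer divide.
// In C terms the reduction below computes roughly the following (an
// illustrative sketch only; the generated code works on registers, with
// regT0 = dividend and regT2 = divisor on entry and the remainder returned
// in regT0):
//
//     uint32_t a = abs(dividend), b = abs(divisor);
//     if (a >= b) {
//         if ((b & (b - 1)) == 0)
//             a &= b - 1;                   // power-of-two fast case
//         else {
//             for (int i = 31; i > 0; --i)  // shift-and-subtract reduction
//                 if (a >= (b << i))
//                     a -= b << i;
//             if (a >= b)
//                 a -= b;                   // final correction
//         }
//     }
//     // The sign flags recorded in regT1 decide whether 'a' is negated.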
void JIT::softModulo()
{
    push(regT1);
    push(regT3);
    move(regT2, regT3);
    move(regT0, regT2);
    move(TrustedImm32(0), regT1);

    // Take absolute values, recording each operand's sign in regT1 so the
    // remainder's sign can be fixed up on exit.
    Jump positiveRegT3 = branch32(GreaterThanOrEqual, regT3, TrustedImm32(0));
    neg32(regT3);
    xor32(TrustedImm32(1), regT1);
    positiveRegT3.link(this);

    Jump positiveRegT2 = branch32(GreaterThanOrEqual, regT2, TrustedImm32(0));
    neg32(regT2);
    xor32(TrustedImm32(2), regT1);
    positiveRegT2.link(this);

    // Save the sign flags for the remainder fixup.
    push(regT1);

    Jump exitBranch = branch32(LessThan, regT2, regT3);

    // Power of two fast case
    move(regT3, regT0);
    sub32(TrustedImm32(1), regT0);
    Jump powerOfTwo = branchTest32(NonZero, regT0, regT3);
    and32(regT0, regT2);
    powerOfTwo.link(this);

    and32(regT3, regT0);

    Jump exitBranch2 = branchTest32(Zero, regT0);

    countLeadingZeros32(regT2, regT0);
    countLeadingZeros32(regT3, regT1);
    sub32(regT0, regT1);

    Jump useFullTable = branch32(Equal, regT1, TrustedImm32(31));

    neg32(regT1);
    add32(TrustedImm32(31), regT1);

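    // Jump into the middle of the unrolled table below: each table entry is
    // 8 bytes of code on ARM, hence an element size shift of 3.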
    int elementSizeByShift = -1;
#if CPU(ARM)
    elementSizeByShift = 3;
#else
#error "JIT_USE_SOFT_MODULO not yet supported on this platform."
#endif
    relativeTableJump(regT1, elementSizeByShift);

    useFullTable.link(this);
    // Modulo table
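    // Each entry conditionally subtracts (divisor << i) from the running
    // remainder: compare, then subtract only when the shifted divisor fits.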
    for (int i = 31; i > 0; --i) {
#if CPU(ARM_TRADITIONAL)
        m_assembler.cmp_r(regT2, m_assembler.lsl(regT3, i));
        m_assembler.sub_r(regT2, regT2, m_assembler.lsl(regT3, i), ARMAssembler::CS);
#elif CPU(ARM_THUMB2)
        ShiftTypeAndAmount shift(SRType_LSL, i);
        m_assembler.sub_S(regT1, regT2, regT3, shift);
        m_assembler.it(ARMv7Assembler::ConditionCS);
        m_assembler.mov(regT2, regT1);
#else
#error "JIT_USE_SOFT_MODULO not yet supported on this platform."
#endif
    }

    Jump lower = branch32(Below, regT2, regT3);
    sub32(regT3, regT2);
    lower.link(this);

    exitBranch.link(this);
    exitBranch2.link(this);

    // Restore the remainder's sign if the saved flags are nonzero.
    pop(regT1);
    Jump positiveResult = branch32(Equal, regT1, TrustedImm32(0));
    neg32(regT2);
    positiveResult.link(this);

    move(regT2, regT0);

    pop(regT3);
    pop(regT1);
    ret();
}
#endif // ENABLE(JIT_USE_SOFT_MODULO)

} // namespace JSC

#endif // USE(JSVALUE32_64)
#endif // ENABLE(JIT)