initial import
[vuplus_webkit] / Source / JavaScriptCore / jit / JIT.h
1 /*
2  * Copyright (C) 2008 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #ifndef JIT_h
27 #define JIT_h
28
29 #if ENABLE(JIT)
30
31 // Verbose logging for OSR-related code.
32 #define ENABLE_JIT_VERBOSE_OSR 0
33
34 // We've run into some problems where changing the size of the class JIT leads to
36  * performance fluctuations.  Try forcing alignment in an attempt to stabilize this.
36 #if COMPILER(GCC)
37 #define JIT_CLASS_ALIGNMENT __attribute__ ((aligned (32)))
38 #else
39 #define JIT_CLASS_ALIGNMENT
40 #endif
41
42 #define ASSERT_JIT_OFFSET_UNUSED(variable, actual, expected) ASSERT_WITH_MESSAGE_UNUSED(variable, actual == expected, "JIT Offset \"%s\" should be %d, not %d.\n", #expected, static_cast<int>(expected), static_cast<int>(actual));
43 #define ASSERT_JIT_OFFSET(actual, expected) ASSERT_WITH_MESSAGE(actual == expected, "JIT Offset \"%s\" should be %d, not %d.\n", #expected, static_cast<int>(expected), static_cast<int>(actual));
44
45 #include "CodeBlock.h"
46 #include "CompactJITCodeMap.h"
47 #include "Interpreter.h"
48 #include "JSInterfaceJIT.h"
49 #include "Opcode.h"
50 #include "Profiler.h"
51 #include <bytecode/SamplingTool.h>
52
53 namespace JSC {
54
55     class CodeBlock;
56     class FunctionExecutable;
57     class JIT;
58     class JSPropertyNameIterator;
59     class Interpreter;
60     class Register;
61     class RegisterFile;
62     class ScopeChainNode;
63     class StructureChain;
64
65     struct CallLinkInfo;
66     struct Instruction;
67     struct OperandTypes;
68     struct PolymorphicAccessStructureList;
69     struct SimpleJumpTable;
70     struct StringJumpTable;
71     struct StructureStubInfo;
72
73     struct CallRecord {
74         MacroAssembler::Call from;
75         unsigned bytecodeOffset;
76         void* to;
77
78         CallRecord()
79         {
80         }
81
82         CallRecord(MacroAssembler::Call from, unsigned bytecodeOffset, void* to = 0)
83             : from(from)
84             , bytecodeOffset(bytecodeOffset)
85             , to(to)
86         {
87         }
88     };
89
90     struct JumpTable {
91         MacroAssembler::Jump from;
92         unsigned toBytecodeOffset;
93
94         JumpTable(MacroAssembler::Jump f, unsigned t)
95             : from(f)
96             , toBytecodeOffset(t)
97         {
98         }
99     };
100
101     struct SlowCaseEntry {
102         MacroAssembler::Jump from;
103         unsigned to;
104         unsigned hint;
105         
106         SlowCaseEntry(MacroAssembler::Jump f, unsigned t, unsigned h = 0)
107             : from(f)
108             , to(t)
109             , hint(h)
110         {
111         }
112     };
113
114     struct SwitchRecord {
115         enum Type {
116             Immediate,
117             Character,
118             String
119         };
120
121         Type type;
122
123         union {
124             SimpleJumpTable* simpleJumpTable;
125             StringJumpTable* stringJumpTable;
126         } jumpTable;
127
128         unsigned bytecodeOffset;
129         unsigned defaultOffset;
130
131         SwitchRecord(SimpleJumpTable* jumpTable, unsigned bytecodeOffset, unsigned defaultOffset, Type type)
132             : type(type)
133             , bytecodeOffset(bytecodeOffset)
134             , defaultOffset(defaultOffset)
135         {
136             this->jumpTable.simpleJumpTable = jumpTable;
137         }
138
139         SwitchRecord(StringJumpTable* jumpTable, unsigned bytecodeOffset, unsigned defaultOffset)
140             : type(String)
141             , bytecodeOffset(bytecodeOffset)
142             , defaultOffset(defaultOffset)
143         {
144             this->jumpTable.stringJumpTable = jumpTable;
145         }
146     };
147
    // Code locations recorded while compiling a property access, kept so the
    // generated code can be patched later.
    struct PropertyStubCompilationInfo {
        MacroAssembler::Call callReturnLocation;
        MacroAssembler::Label hotPathBegin;
    };
152
    // Code locations recorded while compiling a call site, kept so the call
    // can be (re)linked later. 'isCall' distinguishes call from construct.
    struct StructureStubCompilationInfo {
        MacroAssembler::DataLabelPtr hotPathBegin;
        MacroAssembler::Call hotPathOther;
        MacroAssembler::Call callReturnLocation;
        bool isCall;
    };
159
    // Per-method-check bookkeeping: the structure-compare location to patch,
    // plus the index of the associated property-access record.
    struct MethodCallCompilationInfo {
        MethodCallCompilationInfo(unsigned propertyAccessIndex)
            : propertyAccessIndex(propertyAccessIndex)
        {
        }

        MacroAssembler::DataLabelPtr structureToCompare;
        unsigned propertyAccessIndex;
    };
169
170     // Near calls can only be patched to other JIT code, regular calls can be patched to JIT code or relinked to stub functions.
171     void ctiPatchNearCallByReturnAddress(CodeBlock* codeblock, ReturnAddressPtr returnAddress, MacroAssemblerCodePtr newCalleeFunction);
172     void ctiPatchCallByReturnAddress(CodeBlock* codeblock, ReturnAddressPtr returnAddress, MacroAssemblerCodePtr newCalleeFunction);
173     void ctiPatchCallByReturnAddress(CodeBlock* codeblock, ReturnAddressPtr returnAddress, FunctionPtr newCalleeFunction);
174
175     class JIT : private JSInterfaceJIT {
176         friend class JITStubCall;
177
178         using MacroAssembler::Jump;
179         using MacroAssembler::JumpList;
180         using MacroAssembler::Label;
181
182         static const int patchGetByIdDefaultStructure = -1;
183         static const int patchGetByIdDefaultOffset = 0;
184         // Magic number - initial offset cannot be representable as a signed 8bit value, or the X86Assembler
185         // will compress the displacement, and we may not be able to fit a patched offset.
186         static const int patchPutByIdDefaultOffset = 256;
187
188     public:
189         static JITCode compile(JSGlobalData* globalData, CodeBlock* codeBlock, CodePtr* functionEntryArityCheck = 0)
190         {
191             return JIT(globalData, codeBlock).privateCompile(functionEntryArityCheck);
192         }
193
194         static void compileGetByIdProto(JSGlobalData* globalData, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, Structure* prototypeStructure, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset, ReturnAddressPtr returnAddress)
195         {
196             JIT jit(globalData, codeBlock);
197             jit.privateCompileGetByIdProto(stubInfo, structure, prototypeStructure, ident, slot, cachedOffset, returnAddress, callFrame);
198         }
199
200         static void compileGetByIdSelfList(JSGlobalData* globalData, CodeBlock* codeBlock, StructureStubInfo* stubInfo, PolymorphicAccessStructureList* polymorphicStructures, int currentIndex, Structure* structure, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset)
201         {
202             JIT jit(globalData, codeBlock);
203             jit.privateCompileGetByIdSelfList(stubInfo, polymorphicStructures, currentIndex, structure, ident, slot, cachedOffset);
204         }
205         static void compileGetByIdProtoList(JSGlobalData* globalData, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructureList, int currentIndex, Structure* structure, Structure* prototypeStructure, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset)
206         {
207             JIT jit(globalData, codeBlock);
208             jit.privateCompileGetByIdProtoList(stubInfo, prototypeStructureList, currentIndex, structure, prototypeStructure, ident, slot, cachedOffset, callFrame);
209         }
210         static void compileGetByIdChainList(JSGlobalData* globalData, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructureList, int currentIndex, Structure* structure, StructureChain* chain, size_t count, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset)
211         {
212             JIT jit(globalData, codeBlock);
213             jit.privateCompileGetByIdChainList(stubInfo, prototypeStructureList, currentIndex, structure, chain, count, ident, slot, cachedOffset, callFrame);
214         }
215
216         static void compileGetByIdChain(JSGlobalData* globalData, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, StructureChain* chain, size_t count, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset, ReturnAddressPtr returnAddress)
217         {
218             JIT jit(globalData, codeBlock);
219             jit.privateCompileGetByIdChain(stubInfo, structure, chain, count, ident, slot, cachedOffset, returnAddress, callFrame);
220         }
221         
222         static void compilePutByIdTransition(JSGlobalData* globalData, CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* oldStructure, Structure* newStructure, size_t cachedOffset, StructureChain* chain, ReturnAddressPtr returnAddress, bool direct)
223         {
224             JIT jit(globalData, codeBlock);
225             jit.privateCompilePutByIdTransition(stubInfo, oldStructure, newStructure, cachedOffset, chain, returnAddress, direct);
226         }
227
228         static PassRefPtr<ExecutableMemoryHandle> compileCTIMachineTrampolines(JSGlobalData* globalData, TrampolineStructure *trampolines)
229         {
230             if (!globalData->canUseJIT())
231                 return 0;
232             JIT jit(globalData, 0);
233             return jit.privateCompileCTIMachineTrampolines(globalData, trampolines);
234         }
235
236         static CodeRef compileCTINativeCall(JSGlobalData* globalData, NativeFunction func)
237         {
238             if (!globalData->canUseJIT())
239                 return CodeRef();
240             JIT jit(globalData, 0);
241             return jit.privateCompileCTINativeCall(globalData, func);
242         }
243
244         static void patchGetByIdSelf(CodeBlock* codeblock, StructureStubInfo*, Structure*, size_t cachedOffset, ReturnAddressPtr returnAddress);
245         static void patchPutByIdReplace(CodeBlock* codeblock, StructureStubInfo*, Structure*, size_t cachedOffset, ReturnAddressPtr returnAddress, bool direct);
246         static void patchMethodCallProto(JSGlobalData&, CodeBlock* codeblock, MethodCallLinkInfo&, JSObject*, Structure*, JSObject*, ReturnAddressPtr);
247
248         static void compilePatchGetArrayLength(JSGlobalData* globalData, CodeBlock* codeBlock, ReturnAddressPtr returnAddress)
249         {
250             JIT jit(globalData, codeBlock);
251             return jit.privateCompilePatchGetArrayLength(returnAddress);
252         }
253
254         static void linkFor(JSFunction* callee, CodeBlock* callerCodeBlock, CodeBlock* calleeCodeBlock, CodePtr, CallLinkInfo*, int callerArgCount, JSGlobalData*, CodeSpecializationKind);
255
256     private:
257         struct JSRInfo {
258             DataLabelPtr storeLocation;
259             Label target;
260
261             JSRInfo(DataLabelPtr storeLocation, Label targetLocation)
262                 : storeLocation(storeLocation)
263                 , target(targetLocation)
264             {
265             }
266         };
267
268         JIT(JSGlobalData*, CodeBlock* = 0);
269
270         void privateCompileMainPass();
271         void privateCompileLinkPass();
272         void privateCompileSlowCases();
273         JITCode privateCompile(CodePtr* functionEntryArityCheck);
274         void privateCompileGetByIdProto(StructureStubInfo*, Structure*, Structure* prototypeStructure, const Identifier&, const PropertySlot&, size_t cachedOffset, ReturnAddressPtr returnAddress, CallFrame* callFrame);
275         void privateCompileGetByIdSelfList(StructureStubInfo*, PolymorphicAccessStructureList*, int, Structure*, const Identifier&, const PropertySlot&, size_t cachedOffset);
276         void privateCompileGetByIdProtoList(StructureStubInfo*, PolymorphicAccessStructureList*, int, Structure*, Structure* prototypeStructure, const Identifier&, const PropertySlot&, size_t cachedOffset, CallFrame* callFrame);
277         void privateCompileGetByIdChainList(StructureStubInfo*, PolymorphicAccessStructureList*, int, Structure*, StructureChain* chain, size_t count, const Identifier&, const PropertySlot&, size_t cachedOffset, CallFrame* callFrame);
278         void privateCompileGetByIdChain(StructureStubInfo*, Structure*, StructureChain*, size_t count, const Identifier&, const PropertySlot&, size_t cachedOffset, ReturnAddressPtr returnAddress, CallFrame* callFrame);
279         void privateCompilePutByIdTransition(StructureStubInfo*, Structure*, Structure*, size_t cachedOffset, StructureChain*, ReturnAddressPtr returnAddress, bool direct);
280
281         PassRefPtr<ExecutableMemoryHandle> privateCompileCTIMachineTrampolines(JSGlobalData*, TrampolineStructure*);
282         Label privateCompileCTINativeCall(JSGlobalData*, bool isConstruct = false);
283         CodeRef privateCompileCTINativeCall(JSGlobalData*, NativeFunction);
284         void privateCompilePatchGetArrayLength(ReturnAddressPtr returnAddress);
285
286         void addSlowCase(Jump);
287         void addSlowCase(JumpList);
288         void addJump(Jump, int);
289         void emitJumpSlowToHot(Jump, int);
290
291         void compileOpCall(OpcodeID, Instruction* instruction, unsigned callLinkInfoIndex);
292         void compileOpCallVarargs(Instruction* instruction);
293         void compileOpCallInitializeCallFrame();
294         void compileOpCallSlowCase(Instruction* instruction, Vector<SlowCaseEntry>::iterator& iter, unsigned callLinkInfoIndex, OpcodeID opcodeID);
295         void compileOpCallVarargsSlowCase(Instruction* instruction, Vector<SlowCaseEntry>::iterator& iter);
296
297         enum CompileOpStrictEqType { OpStrictEq, OpNStrictEq };
298         void compileOpStrictEq(Instruction* instruction, CompileOpStrictEqType type);
299         bool isOperandConstantImmediateDouble(unsigned src);
300         
301         void emitLoadDouble(unsigned index, FPRegisterID value);
302         void emitLoadInt32ToDouble(unsigned index, FPRegisterID value);
303         Jump emitJumpIfNotObject(RegisterID structureReg);
304
305         void testPrototype(JSValue, JumpList& failureCases);
306
307         void emitWriteBarrier(RegisterID owner, RegisterID scratch, WriteBarrierUseKind);
308
309         template<typename ClassType, typename StructureType> void emitAllocateBasicJSObject(StructureType, void* vtable, RegisterID result, RegisterID storagePtr);
310         template<typename T> void emitAllocateJSFinalObject(T structure, RegisterID result, RegisterID storagePtr);
311         void emitAllocateJSFunction(FunctionExecutable*, RegisterID scopeChain, RegisterID result, RegisterID storagePtr);
312         
313         enum ValueProfilingSiteKind { FirstProfilingSite, SubsequentProfilingSite };
#if ENABLE(VALUE_PROFILER)
        // This assumes that the value to profile is in regT0 and that regT3 is available for
        // scratch.
        void emitValueProfilingSite(ValueProfilingSiteKind);
#else
        // Value profiling compiled out: profiling sites are no-ops.
        void emitValueProfilingSite(ValueProfilingSiteKind) { }
#endif
321
322 #if USE(JSVALUE32_64)
323         bool getOperandConstantImmediateInt(unsigned op1, unsigned op2, unsigned& op, int32_t& constant);
324
325         void emitLoadTag(unsigned index, RegisterID tag);
326         void emitLoadPayload(unsigned index, RegisterID payload);
327
328         void emitLoad(const JSValue& v, RegisterID tag, RegisterID payload);
329         void emitLoad(unsigned index, RegisterID tag, RegisterID payload, RegisterID base = callFrameRegister);
330         void emitLoad2(unsigned index1, RegisterID tag1, RegisterID payload1, unsigned index2, RegisterID tag2, RegisterID payload2);
331
332         void emitStore(unsigned index, RegisterID tag, RegisterID payload, RegisterID base = callFrameRegister);
333         void emitStore(unsigned index, const JSValue constant, RegisterID base = callFrameRegister);
334         void emitStoreInt32(unsigned index, RegisterID payload, bool indexIsInt32 = false);
335         void emitStoreInt32(unsigned index, TrustedImm32 payload, bool indexIsInt32 = false);
336         void emitStoreCell(unsigned index, RegisterID payload, bool indexIsCell = false);
337         void emitStoreBool(unsigned index, RegisterID payload, bool indexIsBool = false);
338         void emitStoreDouble(unsigned index, FPRegisterID value);
339
340         bool isLabeled(unsigned bytecodeOffset);
341         void map(unsigned bytecodeOffset, unsigned virtualRegisterIndex, RegisterID tag, RegisterID payload);
342         void unmap(RegisterID);
343         void unmap();
344         bool isMapped(unsigned virtualRegisterIndex);
345         bool getMappedPayload(unsigned virtualRegisterIndex, RegisterID& payload);
346         bool getMappedTag(unsigned virtualRegisterIndex, RegisterID& tag);
347
348         void emitJumpSlowCaseIfNotJSCell(unsigned virtualRegisterIndex);
349         void emitJumpSlowCaseIfNotJSCell(unsigned virtualRegisterIndex, RegisterID tag);
350         void linkSlowCaseIfNotJSCell(Vector<SlowCaseEntry>::iterator&, unsigned virtualRegisterIndex);
351
352         void compileGetByIdHotPath();
353         void compileGetByIdSlowCase(int resultVReg, int baseVReg, Identifier* ident, Vector<SlowCaseEntry>::iterator& iter, bool isMethodCheck = false);
354         void compileGetDirectOffset(RegisterID base, RegisterID resultTag, RegisterID resultPayload, Structure* structure, size_t cachedOffset);
355         void compileGetDirectOffset(JSObject* base, RegisterID resultTag, RegisterID resultPayload, size_t cachedOffset);
356         void compileGetDirectOffset(RegisterID base, RegisterID resultTag, RegisterID resultPayload, RegisterID offset);
357         void compilePutDirectOffset(RegisterID base, RegisterID valueTag, RegisterID valuePayload, Structure* structure, size_t cachedOffset);
358
359         // Arithmetic opcode helpers
360         void emitAdd32Constant(unsigned dst, unsigned op, int32_t constant, ResultType opType);
361         void emitSub32Constant(unsigned dst, unsigned op, int32_t constant, ResultType opType);
362         void emitBinaryDoubleOp(OpcodeID, unsigned dst, unsigned op1, unsigned op2, OperandTypes, JumpList& notInt32Op1, JumpList& notInt32Op2, bool op1IsInRegisters = true, bool op2IsInRegisters = true);
363
364 #if CPU(X86)
365         // These architecture specific value are used to enable patching - see comment on op_put_by_id.
366         static const int patchOffsetPutByIdStructure = 7;
367         static const int patchOffsetPutByIdPropertyMapOffset1 = 22;
368         static const int patchOffsetPutByIdPropertyMapOffset2 = 28;
369         // These architecture specific value are used to enable patching - see comment on op_get_by_id.
370         static const int patchOffsetGetByIdStructure = 7;
371         static const int patchOffsetGetByIdBranchToSlowCase = 13;
372         static const int patchOffsetGetByIdPropertyMapOffset1 = 19;
373         static const int patchOffsetGetByIdPropertyMapOffset2 = 22;
374         static const int patchOffsetGetByIdPutResult = 22;
375 #if ENABLE(OPCODE_SAMPLING)
376         static const int patchOffsetGetByIdSlowCaseCall = 37;
377 #else
378         static const int patchOffsetGetByIdSlowCaseCall = 33;
379 #endif
380         static const int patchOffsetOpCallCompareToJump = 6;
381
382         static const int patchOffsetMethodCheckProtoObj = 11;
383         static const int patchOffsetMethodCheckProtoStruct = 18;
384         static const int patchOffsetMethodCheckPutFunction = 29;
385 #elif CPU(ARM_TRADITIONAL)
386         // These architecture specific value are used to enable patching - see comment on op_put_by_id.
387         static const int patchOffsetPutByIdStructure = 4;
388         static const int patchOffsetPutByIdPropertyMapOffset1 = 20;
389         static const int patchOffsetPutByIdPropertyMapOffset2 = 28;
390         // These architecture specific value are used to enable patching - see comment on op_get_by_id.
391         static const int patchOffsetGetByIdStructure = 4;
392         static const int patchOffsetGetByIdBranchToSlowCase = 16;
393         static const int patchOffsetGetByIdPropertyMapOffset1 = 20;
394         static const int patchOffsetGetByIdPropertyMapOffset2 = 28;
395         static const int patchOffsetGetByIdPutResult = 36;
396 #if ENABLE(OPCODE_SAMPLING)
397         #error "OPCODE_SAMPLING is not yet supported"
398 #else
399         static const int patchOffsetGetByIdSlowCaseCall = 40;
400 #endif
401         static const int patchOffsetOpCallCompareToJump = 12;
402
403         static const int patchOffsetMethodCheckProtoObj = 12;
404         static const int patchOffsetMethodCheckProtoStruct = 20;
405         static const int patchOffsetMethodCheckPutFunction = 32;
406
407         // sequenceOpCall
408         static const int sequenceOpCallInstructionSpace = 12;
409         static const int sequenceOpCallConstantSpace = 2;
410         // sequenceMethodCheck
411         static const int sequenceMethodCheckInstructionSpace = 40;
412         static const int sequenceMethodCheckConstantSpace = 6;
413         // sequenceGetByIdHotPath
414         static const int sequenceGetByIdHotPathInstructionSpace = 36;
415         static const int sequenceGetByIdHotPathConstantSpace = 4;
416         // sequenceGetByIdSlowCase
417         static const int sequenceGetByIdSlowCaseInstructionSpace = 56;
418         static const int sequenceGetByIdSlowCaseConstantSpace = 3;
419         // sequencePutById
420         static const int sequencePutByIdInstructionSpace = 36;
421         static const int sequencePutByIdConstantSpace = 4;
422 #elif CPU(ARM_THUMB2)
423         // These architecture specific value are used to enable patching - see comment on op_put_by_id.
424         static const int patchOffsetPutByIdStructure = 10;
425         static const int patchOffsetPutByIdPropertyMapOffset1 = 36;
426         static const int patchOffsetPutByIdPropertyMapOffset2 = 48;
427         // These architecture specific value are used to enable patching - see comment on op_get_by_id.
428         static const int patchOffsetGetByIdStructure = 10;
429         static const int patchOffsetGetByIdBranchToSlowCase = 26;
430         static const int patchOffsetGetByIdPropertyMapOffset1 = 28;
431         static const int patchOffsetGetByIdPropertyMapOffset2 = 30;
432         static const int patchOffsetGetByIdPutResult = 32;
433 #if ENABLE(OPCODE_SAMPLING)
434         #error "OPCODE_SAMPLING is not yet supported"
435 #else
436         static const int patchOffsetGetByIdSlowCaseCall = 40;
437 #endif
438         static const int patchOffsetOpCallCompareToJump = 16;
439
440         static const int patchOffsetMethodCheckProtoObj = 24;
441         static const int patchOffsetMethodCheckProtoStruct = 34;
442         static const int patchOffsetMethodCheckPutFunction = 58;
443
444         // sequenceOpCall
445         static const int sequenceOpCallInstructionSpace = 12;
446         static const int sequenceOpCallConstantSpace = 2;
447         // sequenceMethodCheck
448         static const int sequenceMethodCheckInstructionSpace = 40;
449         static const int sequenceMethodCheckConstantSpace = 6;
450         // sequenceGetByIdHotPath
451         static const int sequenceGetByIdHotPathInstructionSpace = 36;
452         static const int sequenceGetByIdHotPathConstantSpace = 4;
453         // sequenceGetByIdSlowCase
454         static const int sequenceGetByIdSlowCaseInstructionSpace = 40;
455         static const int sequenceGetByIdSlowCaseConstantSpace = 2;
456         // sequencePutById
457         static const int sequencePutByIdInstructionSpace = 36;
458         static const int sequencePutByIdConstantSpace = 4;
459 #elif CPU(MIPS)
460 #if WTF_MIPS_ISA(1)
461         static const int patchOffsetPutByIdStructure = 16;
462         static const int patchOffsetPutByIdPropertyMapOffset1 = 56;
463         static const int patchOffsetPutByIdPropertyMapOffset2 = 72;
464         static const int patchOffsetGetByIdStructure = 16;
465         static const int patchOffsetGetByIdBranchToSlowCase = 48;
466         static const int patchOffsetGetByIdPropertyMapOffset1 = 56;
467         static const int patchOffsetGetByIdPropertyMapOffset2 = 76;
468         static const int patchOffsetGetByIdPutResult = 96;
469 #if ENABLE(OPCODE_SAMPLING)
470         #error "OPCODE_SAMPLING is not yet supported"
471 #else
472         static const int patchOffsetGetByIdSlowCaseCall = 56;
473 #endif
474         static const int patchOffsetOpCallCompareToJump = 32;
475         static const int patchOffsetMethodCheckProtoObj = 32;
476         static const int patchOffsetMethodCheckProtoStruct = 56;
477         static const int patchOffsetMethodCheckPutFunction = 88;
478 #else // WTF_MIPS_ISA(1)
479         static const int patchOffsetPutByIdStructure = 12;
480         static const int patchOffsetPutByIdPropertyMapOffset1 = 48;
481         static const int patchOffsetPutByIdPropertyMapOffset2 = 64;
482         static const int patchOffsetGetByIdStructure = 12;
483         static const int patchOffsetGetByIdBranchToSlowCase = 44;
484         static const int patchOffsetGetByIdPropertyMapOffset1 = 48;
485         static const int patchOffsetGetByIdPropertyMapOffset2 = 64;
486         static const int patchOffsetGetByIdPutResult = 80;
487 #if ENABLE(OPCODE_SAMPLING)
488         #error "OPCODE_SAMPLING is not yet supported"
489 #else
490         static const int patchOffsetGetByIdSlowCaseCall = 56;
491 #endif
492         static const int patchOffsetOpCallCompareToJump = 32;
493         static const int patchOffsetMethodCheckProtoObj = 32;
494         static const int patchOffsetMethodCheckProtoStruct = 52;
495         static const int patchOffsetMethodCheckPutFunction = 84;
496 #endif
497 #elif CPU(SH4)
498        // These architecture specific value are used to enable patching - see comment on op_put_by_id.
499         static const int patchOffsetGetByIdStructure = 6;
500         static const int patchOffsetPutByIdPropertyMapOffset = 24;
501         static const int patchOffsetPutByIdStructure = 6;
502         // These architecture specific value are used to enable patching - see comment on op_get_by_id.
503         static const int patchOffsetGetByIdBranchToSlowCase = 10;
504         static const int patchOffsetGetByIdPropertyMapOffset = 24;
505         static const int patchOffsetGetByIdPutResult = 24;
506
507         // sequenceOpCall
508         static const int sequenceOpCallInstructionSpace = 12;
509         static const int sequenceOpCallConstantSpace = 2;
510         // sequenceMethodCheck
511         static const int sequenceMethodCheckInstructionSpace = 40;
512         static const int sequenceMethodCheckConstantSpace = 6;
513         // sequenceGetByIdHotPath
514         static const int sequenceGetByIdHotPathInstructionSpace = 36;
515         static const int sequenceGetByIdHotPathConstantSpace = 5;
516         // sequenceGetByIdSlowCase
517         static const int sequenceGetByIdSlowCaseInstructionSpace = 30;
518         static const int sequenceGetByIdSlowCaseConstantSpace = 3;
519         // sequencePutById
520         static const int sequencePutByIdInstructionSpace = 36;
521         static const int sequencePutByIdConstantSpace = 5;
522
523         static const int patchOffsetGetByIdPropertyMapOffset1 = 20;
524         static const int patchOffsetGetByIdPropertyMapOffset2 = 22;
525
526         static const int patchOffsetPutByIdPropertyMapOffset1 = 20;
527         static const int patchOffsetPutByIdPropertyMapOffset2 = 26;
528
529 #if ENABLE(OPCODE_SAMPLING)
530         static const int patchOffsetGetByIdSlowCaseCall = 0; // FIXME
531 #else
532         static const int patchOffsetGetByIdSlowCaseCall = 26;
533 #endif
534         static const int patchOffsetOpCallCompareToJump = 4;
535
536         static const int patchOffsetMethodCheckProtoObj = 12;
537         static const int patchOffsetMethodCheckProtoStruct = 20;
538         static const int patchOffsetMethodCheckPutFunction = 32;
539 #else
540 #error "JSVALUE32_64 not supported on this platform."
541 #endif
542
543 #else // USE(JSVALUE32_64)
544         void emitGetVirtualRegister(int src, RegisterID dst);
545         void emitGetVirtualRegisters(int src1, RegisterID dst1, int src2, RegisterID dst2);
546         void emitPutVirtualRegister(unsigned dst, RegisterID from = regT0);
        // On JSVALUE64 a cell is a single word, so storing a cell is just a
        // plain virtual-register store. The trailing bool exists only to
        // mirror the JSVALUE32_64 overload's signature and is ignored here.
        void emitStoreCell(unsigned dst, RegisterID payload, bool /* only used in JSValue32_64 */ = false)
        {
            emitPutVirtualRegister(dst, payload);
        }
551
552         int32_t getConstantOperandImmediateInt(unsigned src);
553
554         void killLastResultRegister();
555
556         Jump emitJumpIfJSCell(RegisterID);
557         Jump emitJumpIfBothJSCells(RegisterID, RegisterID, RegisterID);
558         void emitJumpSlowCaseIfJSCell(RegisterID);
559         Jump emitJumpIfNotJSCell(RegisterID);
560         void emitJumpSlowCaseIfNotJSCell(RegisterID);
561         void emitJumpSlowCaseIfNotJSCell(RegisterID, int VReg);
562 #if USE(JSVALUE32_64)
        // NOTE(review): this definition is guarded by #if USE(JSVALUE32_64)
        // but sits inside the #else branch of that same macro, so it appears
        // unreachable as written — confirm before relying on it. As defined,
        // "number" is treated as synonymous with "immediate integer".
        JIT::Jump emitJumpIfImmediateNumber(RegisterID reg)
        {
            return emitJumpIfImmediateInteger(reg);
        }
567         
        // NOTE(review): same apparently-unreachable guard as
        // emitJumpIfImmediateNumber above — confirm before relying on it.
        JIT::Jump emitJumpIfNotImmediateNumber(RegisterID reg)
        {
            return emitJumpIfNotImmediateInteger(reg);
        }
572 #endif
573         Jump emitJumpIfImmediateInteger(RegisterID);
574         Jump emitJumpIfNotImmediateInteger(RegisterID);
575         Jump emitJumpIfNotImmediateIntegers(RegisterID, RegisterID, RegisterID);
576         void emitJumpSlowCaseIfNotImmediateInteger(RegisterID);
577         void emitJumpSlowCaseIfNotImmediateNumber(RegisterID);
578         void emitJumpSlowCaseIfNotImmediateIntegers(RegisterID, RegisterID, RegisterID);
579
        // Fast-path arithmetic helpers for manipulating immediate tags, plus
        // the shared compiler for binary arithmetic opcodes.
#if USE(JSVALUE32_64)
        void emitFastArithDeTagImmediate(RegisterID);
        Jump emitFastArithDeTagImmediateJumpIfZero(RegisterID);
#endif
        void emitFastArithReTagImmediate(RegisterID src, RegisterID dest);
        void emitFastArithIntToImmNoCheck(RegisterID src, RegisterID dest);

        void emitTagAsBoolImmediate(RegisterID reg);
        void compileBinaryArithOp(OpcodeID, unsigned dst, unsigned src1, unsigned src2, OperandTypes opi);
        // The JSVALUE64 slow case additionally needs to know which operands
        // already took the immediate-int fast path.
#if USE(JSVALUE64)
        void compileBinaryArithOpSlowCase(OpcodeID, Vector<SlowCaseEntry>::iterator&, unsigned dst, unsigned src1, unsigned src2, OperandTypes, bool op1HasImmediateIntFastCase, bool op2HasImmediateIntFastCase);
#else
        void compileBinaryArithOpSlowCase(OpcodeID, Vector<SlowCaseEntry>::iterator&, unsigned dst, unsigned src1, unsigned src2, OperandTypes);
#endif
594
        // Property-access code generation: get_by_id hot/slow paths and
        // direct property-storage offset loads/stores.
        void compileGetByIdHotPath(int baseVReg, Identifier*);
        void compileGetByIdSlowCase(int resultVReg, int baseVReg, Identifier* ident, Vector<SlowCaseEntry>::iterator& iter, bool isMethodCheck = false);
        void compileGetDirectOffset(RegisterID base, RegisterID result, Structure* structure, size_t cachedOffset);
        void compileGetDirectOffset(JSObject* base, RegisterID result, size_t cachedOffset);
        void compileGetDirectOffset(RegisterID base, RegisterID result, RegisterID offset, RegisterID scratch);
        void compilePutDirectOffset(RegisterID base, RegisterID value, Structure* structure, size_t cachedOffset);
601
#if CPU(X86_64)
        // These architecture-specific values are used to enable patching - see comment on op_put_by_id.
        static const int patchOffsetPutByIdStructure = 10;
        static const int patchOffsetPutByIdPropertyMapOffset = 31;
        // These architecture-specific values are used to enable patching - see comment on op_get_by_id.
        static const int patchOffsetGetByIdStructure = 10;
        static const int patchOffsetGetByIdBranchToSlowCase = 20;
        static const int patchOffsetGetByIdPropertyMapOffset = 28;
        static const int patchOffsetGetByIdPutResult = 28;
#if ENABLE(OPCODE_SAMPLING)
        static const int patchOffsetGetByIdSlowCaseCall = 64;
#else
        static const int patchOffsetGetByIdSlowCaseCall = 54;
#endif
        static const int patchOffsetOpCallCompareToJump = 9;

        static const int patchOffsetMethodCheckProtoObj = 20;
        static const int patchOffsetMethodCheckProtoStruct = 30;
        static const int patchOffsetMethodCheckPutFunction = 50;
#elif CPU(X86)
        // These architecture-specific values are used to enable patching - see comment on op_put_by_id.
        static const int patchOffsetPutByIdStructure = 7;
        static const int patchOffsetPutByIdPropertyMapOffset = 22;
        // These architecture-specific values are used to enable patching - see comment on op_get_by_id.
        static const int patchOffsetGetByIdStructure = 7;
        static const int patchOffsetGetByIdBranchToSlowCase = 13;
        static const int patchOffsetGetByIdPropertyMapOffset = 22;
        static const int patchOffsetGetByIdPutResult = 22;
#if ENABLE(OPCODE_SAMPLING)
        static const int patchOffsetGetByIdSlowCaseCall = 33;
#else
        static const int patchOffsetGetByIdSlowCaseCall = 23;
#endif
        static const int patchOffsetOpCallCompareToJump = 6;

        static const int patchOffsetMethodCheckProtoObj = 11;
        static const int patchOffsetMethodCheckProtoStruct = 18;
        static const int patchOffsetMethodCheckPutFunction = 29;
#elif CPU(ARM_THUMB2)
        // These architecture-specific values are used to enable patching - see comment on op_put_by_id.
        static const int patchOffsetPutByIdStructure = 10;
        static const int patchOffsetPutByIdPropertyMapOffset = 46;
        // These architecture-specific values are used to enable patching - see comment on op_get_by_id.
        static const int patchOffsetGetByIdStructure = 10;
        static const int patchOffsetGetByIdBranchToSlowCase = 26;
        static const int patchOffsetGetByIdPropertyMapOffset = 46;
        static const int patchOffsetGetByIdPutResult = 50;
#if ENABLE(OPCODE_SAMPLING)
        static const int patchOffsetGetByIdSlowCaseCall = 0; // FIXME: offset not yet measured for OPCODE_SAMPLING on ARM_THUMB2.
#else
        static const int patchOffsetGetByIdSlowCaseCall = 28;
#endif
        static const int patchOffsetOpCallCompareToJump = 16;

        static const int patchOffsetMethodCheckProtoObj = 24;
        static const int patchOffsetMethodCheckProtoStruct = 34;
        static const int patchOffsetMethodCheckPutFunction = 58;
#elif CPU(ARM_TRADITIONAL)
        // These architecture-specific values are used to enable patching - see comment on op_put_by_id.
        static const int patchOffsetPutByIdStructure = 4;
        static const int patchOffsetPutByIdPropertyMapOffset = 20;
        // These architecture-specific values are used to enable patching - see comment on op_get_by_id.
        static const int patchOffsetGetByIdStructure = 4;
        static const int patchOffsetGetByIdBranchToSlowCase = 16;
        static const int patchOffsetGetByIdPropertyMapOffset = 20;
        static const int patchOffsetGetByIdPutResult = 28;
#if ENABLE(OPCODE_SAMPLING)
        #error "OPCODE_SAMPLING is not yet supported"
#else
        static const int patchOffsetGetByIdSlowCaseCall = 28;
#endif
        static const int patchOffsetOpCallCompareToJump = 12;

        static const int patchOffsetMethodCheckProtoObj = 12;
        static const int patchOffsetMethodCheckProtoStruct = 20;
        static const int patchOffsetMethodCheckPutFunction = 32;

        // Instruction/constant-pool space reserved for each uninterrupted
        // sequence (consumed by BEGIN/END_UNINTERRUPTED_SEQUENCE below).
        // sequenceOpCall
        static const int sequenceOpCallInstructionSpace = 12;
        static const int sequenceOpCallConstantSpace = 2;
        // sequenceMethodCheck
        static const int sequenceMethodCheckInstructionSpace = 40;
        static const int sequenceMethodCheckConstantSpace = 6;
        // sequenceGetByIdHotPath
        static const int sequenceGetByIdHotPathInstructionSpace = 28;
        static const int sequenceGetByIdHotPathConstantSpace = 3;
        // sequenceGetByIdSlowCase
        static const int sequenceGetByIdSlowCaseInstructionSpace = 32;
        static const int sequenceGetByIdSlowCaseConstantSpace = 2;
        // sequencePutById
        static const int sequencePutByIdInstructionSpace = 28;
        static const int sequencePutByIdConstantSpace = 3;
#elif CPU(MIPS)
#if WTF_MIPS_ISA(1)
        static const int patchOffsetPutByIdStructure = 16;
        static const int patchOffsetPutByIdPropertyMapOffset = 68;
        static const int patchOffsetGetByIdStructure = 16;
        static const int patchOffsetGetByIdBranchToSlowCase = 48;
        static const int patchOffsetGetByIdPropertyMapOffset = 68;
        static const int patchOffsetGetByIdPutResult = 88;
#if ENABLE(OPCODE_SAMPLING)
        #error "OPCODE_SAMPLING is not yet supported"
#else
        static const int patchOffsetGetByIdSlowCaseCall = 40;
#endif
        static const int patchOffsetOpCallCompareToJump = 32;
        static const int patchOffsetMethodCheckProtoObj = 32;
        static const int patchOffsetMethodCheckProtoStruct = 56;
        static const int patchOffsetMethodCheckPutFunction = 88;
#else // WTF_MIPS_ISA(1)
        static const int patchOffsetPutByIdStructure = 12;
        static const int patchOffsetPutByIdPropertyMapOffset = 60;
        static const int patchOffsetGetByIdStructure = 12;
        static const int patchOffsetGetByIdBranchToSlowCase = 44;
        static const int patchOffsetGetByIdPropertyMapOffset = 60;
        static const int patchOffsetGetByIdPutResult = 76;
#if ENABLE(OPCODE_SAMPLING)
        #error "OPCODE_SAMPLING is not yet supported"
#else
        static const int patchOffsetGetByIdSlowCaseCall = 40;
#endif
        static const int patchOffsetOpCallCompareToJump = 32;
        static const int patchOffsetMethodCheckProtoObj = 32;
        static const int patchOffsetMethodCheckProtoStruct = 52;
        static const int patchOffsetMethodCheckPutFunction = 84;
#endif
#endif
#endif // USE(JSVALUE32_64)
730
#if (defined(ASSEMBLER_HAS_CONSTANT_POOL) && ASSEMBLER_HAS_CONSTANT_POOL)
// When the assembler has a constant pool, these macros token-paste the
// sequence name onto the ...InstructionSpace / ...ConstantSpace constants
// declared above; the do/while(false) wrapper makes each macro a single
// statement. Otherwise they fall back to the argument-less begin/end calls.
#define BEGIN_UNINTERRUPTED_SEQUENCE(name) do { beginUninterruptedSequence(name ## InstructionSpace, name ## ConstantSpace); } while (false)
#define END_UNINTERRUPTED_SEQUENCE_FOR_PUT(name, dst) do { endUninterruptedSequence(name ## InstructionSpace, name ## ConstantSpace, dst); } while (false)
#define END_UNINTERRUPTED_SEQUENCE(name) END_UNINTERRUPTED_SEQUENCE_FOR_PUT(name, 0)

        void beginUninterruptedSequence(int, int);
        void endUninterruptedSequence(int, int, int);

#else
#define BEGIN_UNINTERRUPTED_SEQUENCE(name)  do { beginUninterruptedSequence(); } while (false)
#define END_UNINTERRUPTED_SEQUENCE(name)  do { endUninterruptedSequence(); } while (false)
#define END_UNINTERRUPTED_SEQUENCE_FOR_PUT(name, dst) do { endUninterruptedSequence(); } while (false)
#endif
744
        // Shared helpers for the relational compare-and-branch opcodes.
        void emit_compareAndJump(OpcodeID, unsigned op1, unsigned op2, unsigned target, RelationalCondition);
        void emit_compareAndJumpSlow(unsigned op1, unsigned op2, unsigned target, DoubleCondition, int (JIT_STUB *stub)(STUB_ARGS_DECLARATION), bool invert, Vector<SlowCaseEntry>::iterator&);

        // Fast-path code generators, one emit_op_<name> per bytecode opcode.
        void emit_op_add(Instruction*);
        void emit_op_bitand(Instruction*);
        void emit_op_bitnot(Instruction*);
        void emit_op_bitor(Instruction*);
        void emit_op_bitxor(Instruction*);
        void emit_op_call(Instruction*);
        void emit_op_call_eval(Instruction*);
        void emit_op_call_varargs(Instruction*);
        void emit_op_call_put_result(Instruction*);
        void emit_op_catch(Instruction*);
        void emit_op_construct(Instruction*);
        void emit_op_get_callee(Instruction*);
        void emit_op_create_this(Instruction*);
        void emit_op_convert_this(Instruction*);
        void emit_op_create_arguments(Instruction*);
        void emit_op_debug(Instruction*);
        void emit_op_del_by_id(Instruction*);
        void emit_op_div(Instruction*);
        void emit_op_end(Instruction*);
        void emit_op_enter(Instruction*);
        void emit_op_create_activation(Instruction*);
        void emit_op_eq(Instruction*);
        void emit_op_eq_null(Instruction*);
        void emit_op_get_by_id(Instruction*);
        void emit_op_get_arguments_length(Instruction*);
        void emit_op_get_by_val(Instruction*);
        void emit_op_get_argument_by_val(Instruction*);
        void emit_op_get_by_pname(Instruction*);
        void emit_op_get_global_var(Instruction*);
        void emit_op_get_scoped_var(Instruction*);
        void emit_op_init_lazy_reg(Instruction*);
        void emit_op_check_has_instance(Instruction*);
        void emit_op_instanceof(Instruction*);
        void emit_op_jeq_null(Instruction*);
        void emit_op_jfalse(Instruction*);
        void emit_op_jmp(Instruction*);
        void emit_op_jmp_scopes(Instruction*);
        void emit_op_jneq_null(Instruction*);
        void emit_op_jneq_ptr(Instruction*);
        void emit_op_jless(Instruction*);
        void emit_op_jlesseq(Instruction*);
        void emit_op_jgreater(Instruction*);
        void emit_op_jgreatereq(Instruction*);
        void emit_op_jnless(Instruction*);
        void emit_op_jnlesseq(Instruction*);
        void emit_op_jngreater(Instruction*);
        void emit_op_jngreatereq(Instruction*);
        void emit_op_jsr(Instruction*);
        void emit_op_jtrue(Instruction*);
        void emit_op_load_varargs(Instruction*);
        // The op_loop* generators are defined inline at the bottom of this
        // file: each adds a timeout/optimization check to the matching jump.
        void emit_op_loop(Instruction*);
        void emit_op_loop_hint(Instruction*);
        void emit_op_loop_if_less(Instruction*);
        void emit_op_loop_if_lesseq(Instruction*);
        void emit_op_loop_if_greater(Instruction*);
        void emit_op_loop_if_greatereq(Instruction*);
        void emit_op_loop_if_true(Instruction*);
        void emit_op_loop_if_false(Instruction*);
        void emit_op_lshift(Instruction*);
        void emit_op_method_check(Instruction*);
        void emit_op_mod(Instruction*);
        void emit_op_mov(Instruction*);
        void emit_op_mul(Instruction*);
        void emit_op_negate(Instruction*);
        void emit_op_neq(Instruction*);
        void emit_op_neq_null(Instruction*);
        void emit_op_new_array(Instruction*);
        void emit_op_new_array_buffer(Instruction*);
        void emit_op_new_func(Instruction*);
        void emit_op_new_func_exp(Instruction*);
        void emit_op_new_object(Instruction*);
        void emit_op_new_regexp(Instruction*);
        void emit_op_get_pnames(Instruction*);
        void emit_op_next_pname(Instruction*);
        void emit_op_not(Instruction*);
        void emit_op_nstricteq(Instruction*);
        void emit_op_pop_scope(Instruction*);
        void emit_op_post_dec(Instruction*);
        void emit_op_post_inc(Instruction*);
        void emit_op_pre_dec(Instruction*);
        void emit_op_pre_inc(Instruction*);
        void emit_op_profile_did_call(Instruction*);
        void emit_op_profile_will_call(Instruction*);
        void emit_op_push_new_scope(Instruction*);
        void emit_op_push_scope(Instruction*);
        void emit_op_put_by_id(Instruction*);
        void emit_op_put_by_index(Instruction*);
        void emit_op_put_by_val(Instruction*);
        void emit_op_put_getter(Instruction*);
        void emit_op_put_global_var(Instruction*);
        void emit_op_put_scoped_var(Instruction*);
        void emit_op_put_setter(Instruction*);
        void emit_op_resolve(Instruction*);
        void emit_op_resolve_base(Instruction*);
        void emit_op_ensure_property_exists(Instruction*);
        void emit_op_resolve_global(Instruction*, bool dynamic = false);
        void emit_op_resolve_global_dynamic(Instruction*);
        void emit_op_resolve_skip(Instruction*);
        void emit_op_resolve_with_base(Instruction*);
        void emit_op_resolve_with_this(Instruction*);
        void emit_op_ret(Instruction*);
        void emit_op_ret_object_or_this(Instruction*);
        void emit_op_rshift(Instruction*);
        void emit_op_sret(Instruction*);
        void emit_op_strcat(Instruction*);
        void emit_op_stricteq(Instruction*);
        void emit_op_sub(Instruction*);
        void emit_op_switch_char(Instruction*);
        void emit_op_switch_imm(Instruction*);
        void emit_op_switch_string(Instruction*);
        void emit_op_tear_off_activation(Instruction*);
        void emit_op_tear_off_arguments(Instruction*);
        void emit_op_throw(Instruction*);
        void emit_op_throw_reference_error(Instruction*);
        void emit_op_to_jsnumber(Instruction*);
        void emit_op_to_primitive(Instruction*);
        void emit_op_unexpected_load(Instruction*);
        void emit_op_urshift(Instruction*);
#if ENABLE(JIT_USE_SOFT_MODULO)
        void softModulo();
#endif
869
        // Slow-path code generators, one emitSlow_op_<name> per opcode whose
        // fast path can bail out; the iterator walks the recorded slow cases.
        void emitSlow_op_add(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_bitand(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_bitnot(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_bitor(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_bitxor(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_call(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_call_eval(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_call_varargs(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_construct(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_convert_this(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_create_this(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_div(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_eq(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_get_by_id(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_get_arguments_length(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_get_by_val(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_get_argument_by_val(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_get_by_pname(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_check_has_instance(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_instanceof(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_jfalse(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_jless(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_jlesseq(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_jgreater(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_jgreatereq(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_jnless(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_jnlesseq(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_jngreater(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_jngreatereq(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_jtrue(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_load_varargs(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_loop_if_less(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_loop_if_lesseq(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_loop_if_greater(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_loop_if_greatereq(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_loop_if_true(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_loop_if_false(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_lshift(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_method_check(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_mod(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_mul(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_negate(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_neq(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_new_object(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_not(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_nstricteq(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_post_dec(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_post_inc(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_pre_dec(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_pre_inc(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_put_by_id(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_put_by_val(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_resolve_global(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_resolve_global_dynamic(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_rshift(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_stricteq(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_sub(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_to_jsnumber(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_to_primitive(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_urshift(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_new_func(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_new_func_exp(Instruction*, Vector<SlowCaseEntry>::iterator&);

        
        // Shared implementation behind the rshift/urshift opcodes.
        void emitRightShift(Instruction*, bool isUnsigned);
        void emitRightShiftSlowCase(Instruction*, Vector<SlowCaseEntry>::iterator&, bool isUnsigned);

        /* This function is deprecated. */
        void emitGetJITStubArg(unsigned argumentNumber, RegisterID dst);

        void emitInitRegister(unsigned dst);

        // Accessors for reading/writing CallFrame header slots.
        void emitPutToCallFrameHeader(RegisterID from, RegisterFile::CallFrameHeaderEntry entry);
        void emitPutCellToCallFrameHeader(RegisterID from, RegisterFile::CallFrameHeaderEntry);
        void emitPutIntToCallFrameHeader(RegisterID from, RegisterFile::CallFrameHeaderEntry);
        void emitPutImmediateToCallFrameHeader(void* value, RegisterFile::CallFrameHeaderEntry entry);
        void emitGetFromCallFrameHeaderPtr(RegisterFile::CallFrameHeaderEntry entry, RegisterID to, RegisterID from = callFrameRegister);
        void emitGetFromCallFrameHeader32(RegisterFile::CallFrameHeaderEntry entry, RegisterID to, RegisterID from = callFrameRegister);

        // Constant-operand queries.
        JSValue getConstantOperand(unsigned src);
        bool isOperandConstantImmediateInt(unsigned src);
        bool isOperandConstantImmediateChar(unsigned src);

        bool atJumpTarget();
954
955         Jump getSlowCase(Vector<SlowCaseEntry>::iterator& iter)
956         {
957             return iter++->from;
958         }
959         void linkSlowCase(Vector<SlowCaseEntry>::iterator& iter)
960         {
961             iter->from.link(this);
962             ++iter;
963         }
        void linkSlowCaseIfNotJSCell(Vector<SlowCaseEntry>::iterator&, int vReg);

        // Emits a comparison of 'reg's structure against 'structure';
        // returns the jump taken on mismatch.
        Jump checkStructure(RegisterID reg, Structure* structure);

        void restoreArgumentReference();
        void restoreArgumentReferenceForTrampoline();
        void updateTopCallFrame();

        Call emitNakedCall(CodePtr function = CodePtr());

        void preserveReturnAddressAfterCall(RegisterID);
        void restoreReturnAddressBeforeReturn(RegisterID);
        void restoreReturnAddressBeforeReturn(Address);

        // Loads the character value of a single character string into dst.
        void emitLoadCharacterString(RegisterID src, RegisterID dst, JumpList& failures);
        
        enum OptimizationCheckKind { LoopOptimizationCheck, RetOptimizationCheck };
        // Without tiered compilation the optimization check compiles to nothing.
#if ENABLE(TIERED_COMPILATION)
        void emitOptimizationCheck(OptimizationCheckKind);
#else
        void emitOptimizationCheck(OptimizationCheckKind) { }
#endif
        
        void emitTimeoutCheck();
#ifndef NDEBUG
        void printBytecodeOperandTypes(unsigned src1, unsigned src2);
#endif

#if ENABLE(SAMPLING_FLAGS)
        void setSamplingFlag(int32_t);
        void clearSamplingFlag(int32_t);
#endif

#if ENABLE(SAMPLING_COUNTERS)
        void emitCount(AbstractSamplingCounter&, uint32_t = 1);
#endif

#if ENABLE(OPCODE_SAMPLING)
        void sampleInstruction(Instruction*, bool = false);
#endif

#if ENABLE(CODEBLOCK_SAMPLING)
        void sampleCodeBlock(CodeBlock*);
#else
        void sampleCodeBlock(CodeBlock*) {}
#endif

#if ENABLE(TIERED_COMPILATION)
        bool shouldEmitProfiling() { return m_canBeOptimized; }
#else
        // Enables use of value profiler with tiered compilation turned off,
        // in which case all code gets profiled.
        bool shouldEmitProfiling() { return true; }
#endif

        Interpreter* m_interpreter;
        JSGlobalData* m_globalData;
        CodeBlock* m_codeBlock;     // Code block currently being compiled.

        // Per-compilation bookkeeping accumulated while emitting code.
        Vector<CallRecord> m_calls;
        Vector<Label> m_labels;
        Vector<PropertyStubCompilationInfo> m_propertyAccessCompilationInfo;
        Vector<StructureStubCompilationInfo> m_callStructureStubCompilationInfo;
        Vector<MethodCallCompilationInfo> m_methodCallCompilationInfo;
        Vector<JumpTable> m_jmpTable;

        unsigned m_bytecodeOffset;  // Offset of the bytecode currently being compiled.
        Vector<JSRInfo> m_jsrSites;
        Vector<SlowCaseEntry> m_slowCases;
        Vector<SwitchRecord> m_switches;

        // Running indices into the code block's info tables.
        unsigned m_propertyAccessInstructionIndex;
        unsigned m_globalResolveInfoIndex;
        unsigned m_callLinkInfoIndex;

#if USE(JSVALUE32_64)
        // Cached mapping of a virtual register to its tag/payload registers.
        unsigned m_jumpTargetIndex;
        unsigned m_mappedBytecodeOffset;
        unsigned m_mappedVirtualRegisterIndex;
        RegisterID m_mappedTag;
        RegisterID m_mappedPayload;
#else
        int m_lastResultBytecodeRegister;
#endif
        unsigned m_jumpTargetsPosition;

        // Debug-only markers used to validate uninterrupted sequences.
#ifndef NDEBUG
#if defined(ASSEMBLER_HAS_CONSTANT_POOL) && ASSEMBLER_HAS_CONSTANT_POOL
        Label m_uninterruptedInstructionSequenceBegin;
        int m_uninterruptedConstantSequenceBegin;
#endif
#endif
        WeakRandom m_randomGenerator;
        static CodeRef stringGetByValStubGenerator(JSGlobalData*);
        
#if ENABLE(TIERED_COMPILATION)
        bool m_canBeOptimized;
        Label m_startOfCode;
        CompactJITCodeMap::Encoder m_jitCodeMapEncoder;
#endif
1065     } JIT_CLASS_ALIGNMENT;
1066
    // op_loop behaves like op_jmp but first emits a timeout check, so long-running
    // loops can be interrupted.
    inline void JIT::emit_op_loop(Instruction* currentInstruction)
    {
        emitTimeoutCheck();
        emit_op_jmp(currentInstruction);
    }
1072
    // Emits only an optimization check (a no-op unless TIERED_COMPILATION is
    // enabled — see emitOptimizationCheck above).
    inline void JIT::emit_op_loop_hint(Instruction*)
    {
        emitOptimizationCheck(LoopOptimizationCheck);
    }
1077
    // op_loop_if_true is op_jtrue plus a timeout check before the branch.
    inline void JIT::emit_op_loop_if_true(Instruction* currentInstruction)
    {
        emitTimeoutCheck();
        emit_op_jtrue(currentInstruction);
    }
1083
    // The slow path is identical to op_jtrue's; the timeout check adds no slow cases.
    inline void JIT::emitSlow_op_loop_if_true(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
    {
        emitSlow_op_jtrue(currentInstruction, iter);
    }
1088
    // op_loop_if_false is op_jfalse plus a timeout check before the branch.
    inline void JIT::emit_op_loop_if_false(Instruction* currentInstruction)
    {
        emitTimeoutCheck();
        emit_op_jfalse(currentInstruction);
    }
1094
    // The slow path is identical to op_jfalse's; the timeout check adds no slow cases.
    inline void JIT::emitSlow_op_loop_if_false(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
    {
        emitSlow_op_jfalse(currentInstruction, iter);
    }
1099
    // op_loop_if_less is op_jless plus a timeout check before the branch.
    inline void JIT::emit_op_loop_if_less(Instruction* currentInstruction)
    {
        emitTimeoutCheck();
        emit_op_jless(currentInstruction);
    }
1105
    // The slow path is identical to op_jless's; the timeout check adds no slow cases.
    inline void JIT::emitSlow_op_loop_if_less(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
    {
        emitSlow_op_jless(currentInstruction, iter);
    }
1110
    // op_loop_if_lesseq is op_jlesseq plus a timeout check before the branch.
    inline void JIT::emit_op_loop_if_lesseq(Instruction* currentInstruction)
    {
        emitTimeoutCheck();
        emit_op_jlesseq(currentInstruction);
    }
1116
    // The slow path is identical to op_jlesseq's; the timeout check adds no slow cases.
    inline void JIT::emitSlow_op_loop_if_lesseq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
    {
        emitSlow_op_jlesseq(currentInstruction, iter);
    }
1121
    // op_loop_if_greater is op_jgreater plus a timeout check before the branch.
    inline void JIT::emit_op_loop_if_greater(Instruction* currentInstruction)
    {
        emitTimeoutCheck();
        emit_op_jgreater(currentInstruction);
    }
1127
    // The slow path is identical to op_jgreater's; the timeout check adds no slow cases.
    inline void JIT::emitSlow_op_loop_if_greater(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
    {
        emitSlow_op_jgreater(currentInstruction, iter);
    }
1132
    // op_loop_if_greatereq is op_jgreatereq plus a timeout check before the branch.
    inline void JIT::emit_op_loop_if_greatereq(Instruction* currentInstruction)
    {
        emitTimeoutCheck();
        emit_op_jgreatereq(currentInstruction);
    }
1138
    // The slow path is identical to op_jgreatereq's; the timeout check adds no slow cases.
    inline void JIT::emitSlow_op_loop_if_greatereq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
    {
        emitSlow_op_jgreatereq(currentInstruction, iter);
    }
1143
1144 } // namespace JSC
1145
1146 #endif // ENABLE(JIT)
1147
1148 #endif // JIT_h