// Source/JavaScriptCore/jit/JITPropertyAccess.cpp
/*
 * Copyright (C) 2008, 2009 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"

#if ENABLE(JIT)
#include "JIT.h"

#include "CodeBlock.h"
#include "GetterSetter.h"
#include "JITInlineMethods.h"
#include "JITStubCall.h"
#include "JSArray.h"
#include "JSFunction.h"
#include "JSPropertyNameIterator.h"
#include "Interpreter.h"
#include "LinkBuffer.h"
#include "RepatchBuffer.h"
#include "ResultType.h"
#include "SamplingTool.h"

#ifndef NDEBUG
#include <stdio.h>
#endif

using namespace std;

namespace JSC {
#if USE(JSVALUE64)

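// Generates a shared stub used by get_by_val when the base is a JSString:
// if the string is not a rope and the index is within the length, return the
// cached single-character JSString; on any failure, return 0 so the caller
// can fall through to the generic path.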
JIT::CodeRef JIT::stringGetByValStubGenerator(JSGlobalData* globalData)
{
    JSInterfaceJIT jit;
    JumpList failures;
    failures.append(jit.branchPtr(NotEqual, Address(regT0), TrustedImmPtr(globalData->jsStringVPtr)));
    failures.append(jit.branchTest32(NonZero, Address(regT0, OBJECT_OFFSETOF(JSString, m_fiberCount))));

    // Load the string length into regT2, and start the process of loading the data pointer into regT0.
    jit.load32(Address(regT0, ThunkHelpers::jsStringLengthOffset()), regT2);
    jit.loadPtr(Address(regT0, ThunkHelpers::jsStringValueOffset()), regT0);
    jit.loadPtr(Address(regT0, ThunkHelpers::stringImplDataOffset()), regT0);

    // Do an unsigned compare to simultaneously filter negative indices as well as indices that are too large.
    failures.append(jit.branch32(AboveOrEqual, regT1, regT2));

    // Load the character.
    jit.load16(BaseIndex(regT0, regT1, TimesTwo, 0), regT0);

    // Only characters below 0x100 have cached single-character strings.
    failures.append(jit.branch32(AboveOrEqual, regT0, TrustedImm32(0x100)));
    jit.move(TrustedImmPtr(globalData->smallStrings.singleCharacterStrings()), regT1);
    jit.loadPtr(BaseIndex(regT1, regT0, ScalePtr, 0), regT0);
    jit.ret();

    // On failure, return 0 so the caller falls through to the generic path.
    failures.link(&jit);
    jit.move(TrustedImm32(0), regT0);
    jit.ret();

    LinkBuffer patchBuffer(*globalData, &jit);
    return patchBuffer.finalizeCode();
}

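// Fast path for a[i]: the property must be an immediate int32, the base must
// be a JSArray, and the index must hit a non-empty slot within the vector;
// anything else bails to the slow case.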
void JIT::emit_op_get_by_val(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned base = currentInstruction[2].u.operand;
    unsigned property = currentInstruction[3].u.operand;

    emitGetVirtualRegisters(base, regT0, property, regT1);
    emitJumpSlowCaseIfNotImmediateInteger(regT1);

    // This is technically incorrect - we're zero-extending an int32. On the hot path this doesn't matter.
    // We check the value as if it was a uint32 against the m_vectorLength - which will always fail if the
    // number was negative, since m_vectorLength is always less than INT_MAX (the total allocation size is
    // always less than 4GB). As such, zero-extending will have been correct (and extending the value to
    // 64 bits is necessary, since it's used in the address calculation). We zero-extend rather than
    // sign-extend since it makes it easier to re-tag the value in the slow case.
    zeroExtend32ToPtr(regT1, regT1);

    emitJumpSlowCaseIfNotJSCell(regT0, base);
    addSlowCase(branchPtr(NotEqual, Address(regT0), TrustedImmPtr(m_globalData->jsArrayVPtr)));

    loadPtr(Address(regT0, JSArray::storageOffset()), regT2);
    addSlowCase(branch32(AboveOrEqual, regT1, Address(regT0, JSArray::vectorLengthOffset())));

    loadPtr(BaseIndex(regT2, regT1, ScalePtr, OBJECT_OFFSETOF(ArrayStorage, m_vector[0])), regT0);
    addSlowCase(branchTestPtr(Zero, regT0));

    emitValueProfilingSite(FirstProfilingSite);
    emitPutVirtualRegister(dst);
}

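// The slow cases must be linked in the same order the fast path registered
// them: property int32 check, base cell check, array vptr check, vector
// length check, then empty value. A string base gets a shot at the string
// stub before we fall back to the cti_op_get_by_val call.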
void JIT::emitSlow_op_get_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned base = currentInstruction[2].u.operand;
    unsigned property = currentInstruction[3].u.operand;

    linkSlowCase(iter); // property int32 check
    linkSlowCaseIfNotJSCell(iter, base); // base cell check
    Jump nonCell = jump();
    linkSlowCase(iter); // base array check
    Jump notString = branchPtr(NotEqual, Address(regT0), TrustedImmPtr(m_globalData->jsStringVPtr));
    emitNakedCall(CodeLocationLabel(m_globalData->getCTIStub(stringGetByValStubGenerator).code()));
    Jump failed = branchTestPtr(Zero, regT0);
    emitPutVirtualRegister(dst, regT0);
    emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_get_by_val));
    failed.link(this);
    notString.link(this);
    nonCell.link(this);

    linkSlowCase(iter); // vector length check
    linkSlowCase(iter); // empty value

    JITStubCall stubCall(this, cti_op_get_by_val);
    stubCall.addArgument(base, regT2);
    stubCall.addArgument(property, regT2);
    stubCall.call(dst);

    emitValueProfilingSite(SubsequentProfilingSite);
}

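// Load a property from out-of-line property storage at a dynamic offset
// (measured in pointer-sized slots).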
void JIT::compileGetDirectOffset(RegisterID base, RegisterID result, RegisterID offset, RegisterID scratch)
{
    loadPtr(Address(base, JSObject::offsetOfPropertyStorage()), scratch);
    loadPtr(BaseIndex(scratch, offset, ScalePtr, 0), result);
}

void JIT::emit_op_get_by_pname(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned base = currentInstruction[2].u.operand;
    unsigned property = currentInstruction[3].u.operand;
    unsigned expected = currentInstruction[4].u.operand;
    unsigned iter = currentInstruction[5].u.operand;
    unsigned i = currentInstruction[6].u.operand;

    emitGetVirtualRegister(property, regT0);
    addSlowCase(branchPtr(NotEqual, regT0, addressFor(expected)));
    emitGetVirtualRegisters(base, regT0, iter, regT1);
    emitJumpSlowCaseIfNotJSCell(regT0, base);

    // Test base's structure
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    addSlowCase(branchPtr(NotEqual, regT2, Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedStructure))));
    load32(addressFor(i), regT3);
    sub32(TrustedImm32(1), regT3);
    addSlowCase(branch32(AboveOrEqual, regT3, Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_numCacheableSlots))));
    compileGetDirectOffset(regT0, regT0, regT3, regT1);

    emitPutVirtualRegister(dst, regT0);
}

void JIT::emitSlow_op_get_by_pname(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned base = currentInstruction[2].u.operand;
    unsigned property = currentInstruction[3].u.operand;

    linkSlowCase(iter);
    linkSlowCaseIfNotJSCell(iter, base);
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_get_by_val);
    stubCall.addArgument(base, regT2);
    stubCall.addArgument(property, regT2);
    stubCall.call(dst);
}

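// Fast path for a[i] = v, mirroring emit_op_get_by_val's checks. A store to
// an empty slot inside the vector bumps m_numValuesInVector and, if the
// index is at or past m_length, grows m_length before retrying the store.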
void JIT::emit_op_put_by_val(Instruction* currentInstruction)
{
    unsigned base = currentInstruction[1].u.operand;
    unsigned property = currentInstruction[2].u.operand;
    unsigned value = currentInstruction[3].u.operand;

    emitGetVirtualRegisters(base, regT0, property, regT1);
    emitJumpSlowCaseIfNotImmediateInteger(regT1);
    // See comment in op_get_by_val.
    zeroExtend32ToPtr(regT1, regT1);
    emitJumpSlowCaseIfNotJSCell(regT0, base);
    addSlowCase(branchPtr(NotEqual, Address(regT0), TrustedImmPtr(m_globalData->jsArrayVPtr)));
    addSlowCase(branch32(AboveOrEqual, regT1, Address(regT0, JSArray::vectorLengthOffset())));

    loadPtr(Address(regT0, JSArray::storageOffset()), regT2);
    Jump empty = branchTestPtr(Zero, BaseIndex(regT2, regT1, ScalePtr, OBJECT_OFFSETOF(ArrayStorage, m_vector[0])));

    Label storeResult(this);
    emitGetVirtualRegister(value, regT0);
    storePtr(regT0, BaseIndex(regT2, regT1, ScalePtr, OBJECT_OFFSETOF(ArrayStorage, m_vector[0])));
    Jump end = jump();

    empty.link(this);
    add32(TrustedImm32(1), Address(regT2, OBJECT_OFFSETOF(ArrayStorage, m_numValuesInVector)));
    branch32(Below, regT1, Address(regT2, OBJECT_OFFSETOF(ArrayStorage, m_length))).linkTo(storeResult, this);

    move(regT1, regT0);
    add32(TrustedImm32(1), regT0);
    store32(regT0, Address(regT2, OBJECT_OFFSETOF(ArrayStorage, m_length)));
    jump().linkTo(storeResult, this);

    end.link(this);
}

void JIT::emitSlow_op_put_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned base = currentInstruction[1].u.operand;
    unsigned property = currentInstruction[2].u.operand;
    unsigned value = currentInstruction[3].u.operand;

    linkSlowCase(iter); // property int32 check
    linkSlowCaseIfNotJSCell(iter, base); // base cell check
    linkSlowCase(iter); // base not array check
    linkSlowCase(iter); // in vector check

    JITStubCall stubPutByValCall(this, cti_op_put_by_val);
    stubPutByValCall.addArgument(regT0);
    stubPutByValCall.addArgument(property, regT2);
    stubPutByValCall.addArgument(value, regT2);
    stubPutByValCall.call();
}

void JIT::emit_op_put_by_index(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_put_by_index);
    stubCall.addArgument(currentInstruction[1].u.operand, regT2);
    stubCall.addArgument(TrustedImm32(currentInstruction[2].u.operand));
    stubCall.addArgument(currentInstruction[3].u.operand, regT2);
    stubCall.call();
}

void JIT::emit_op_put_getter(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_put_getter);
    stubCall.addArgument(currentInstruction[1].u.operand, regT2);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.addArgument(currentInstruction[3].u.operand, regT2);
    stubCall.call();
}

void JIT::emit_op_put_setter(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_put_setter);
    stubCall.addArgument(currentInstruction[1].u.operand, regT2);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.addArgument(currentInstruction[3].u.operand, regT2);
    stubCall.call();
}

void JIT::emit_op_del_by_id(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_del_by_id);
    stubCall.addArgument(currentInstruction[2].u.operand, regT2);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[3].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

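// op_method_check fuses with the get_by_id that follows it: we check the
// base's Structure and its prototype's Structure inline so that the cached
// function can be patched in as a constant, falling back to a regular
// get_by_id (with a method_check-flavoured slow case) when the checks fail.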
void JIT::emit_op_method_check(Instruction* currentInstruction)
{
    // Assert that the following instruction is a get_by_id.
    ASSERT(m_interpreter->getOpcodeID((currentInstruction + OPCODE_LENGTH(op_method_check))->u.opcode) == op_get_by_id);

    currentInstruction += OPCODE_LENGTH(op_method_check);
    unsigned resultVReg = currentInstruction[1].u.operand;
    unsigned baseVReg = currentInstruction[2].u.operand;
    Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand));

    emitGetVirtualRegister(baseVReg, regT0);

    // Do the method check - check the object & its prototype's structure inline (this is the common case).
    m_methodCallCompilationInfo.append(MethodCallCompilationInfo(m_propertyAccessCompilationInfo.size()));
    MethodCallCompilationInfo& info = m_methodCallCompilationInfo.last();

    Jump notCell = emitJumpIfNotJSCell(regT0);

    BEGIN_UNINTERRUPTED_SEQUENCE(sequenceMethodCheck);

    Jump structureCheck = branchPtrWithPatch(NotEqual, Address(regT0, JSCell::structureOffset()), info.structureToCompare, TrustedImmPtr(reinterpret_cast<void*>(patchGetByIdDefaultStructure)));
    DataLabelPtr protoStructureToCompare, protoObj = moveWithPatch(TrustedImmPtr(0), regT1);
    Jump protoStructureCheck = branchPtrWithPatch(NotEqual, Address(regT1, JSCell::structureOffset()), protoStructureToCompare, TrustedImmPtr(reinterpret_cast<void*>(patchGetByIdDefaultStructure)));

    // This will be relinked to load the function without doing a load.
    DataLabelPtr putFunction = moveWithPatch(TrustedImmPtr(0), regT0);

    END_UNINTERRUPTED_SEQUENCE(sequenceMethodCheck);

    Jump match = jump();

    ASSERT_JIT_OFFSET_UNUSED(protoObj, differenceBetween(info.structureToCompare, protoObj), patchOffsetMethodCheckProtoObj);
    ASSERT_JIT_OFFSET(differenceBetween(info.structureToCompare, protoStructureToCompare), patchOffsetMethodCheckProtoStruct);
    ASSERT_JIT_OFFSET_UNUSED(putFunction, differenceBetween(info.structureToCompare, putFunction), patchOffsetMethodCheckPutFunction);

    // Link the failure cases here.
    notCell.link(this);
    structureCheck.link(this);
    protoStructureCheck.link(this);

    // Do a regular(ish) get_by_id (the slow case will be linked to
    // cti_op_get_by_id_method_check instead of cti_op_get_by_id).
    compileGetByIdHotPath(baseVReg, ident);

    match.link(this);
    emitValueProfilingSite(FirstProfilingSite);
    emitPutVirtualRegister(resultVReg);

    // We've already generated the following get_by_id, so make sure it's skipped over.
    m_bytecodeOffset += OPCODE_LENGTH(op_get_by_id);
}

void JIT::emitSlow_op_method_check(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    currentInstruction += OPCODE_LENGTH(op_method_check);
    unsigned resultVReg = currentInstruction[1].u.operand;
    unsigned baseVReg = currentInstruction[2].u.operand;
    Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand));

    compileGetByIdSlowCase(resultVReg, baseVReg, ident, iter, true);

    // We've already generated the following get_by_id, so make sure it's skipped over.
    m_bytecodeOffset += OPCODE_LENGTH(op_get_by_id);
}

void JIT::emit_op_get_by_id(Instruction* currentInstruction)
{
    unsigned resultVReg = currentInstruction[1].u.operand;
    unsigned baseVReg = currentInstruction[2].u.operand;
    Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand));

    emitGetVirtualRegister(baseVReg, regT0);
    compileGetByIdHotPath(baseVReg, ident);
    emitValueProfilingSite(FirstProfilingSite);
    emitPutVirtualRegister(resultVReg);
}

void JIT::compileGetByIdHotPath(int baseVReg, Identifier*)
{
    // As for put_by_id, get_by_id requires the offset of the Structure and the offset of the access to be patched.
    // Additionally, for get_by_id we need to patch the offset of the branch to the slow case (we patch this to jump
    // to array-length / prototype access trampolines), and finally we also need the property-map access offset as a
    // label to jump back to if one of these trampolines finds a match.

    emitJumpSlowCaseIfNotJSCell(regT0, baseVReg);

    BEGIN_UNINTERRUPTED_SEQUENCE(sequenceGetByIdHotPath);

    Label hotPathBegin(this);
    m_propertyAccessCompilationInfo.append(PropertyStubCompilationInfo());
    m_propertyAccessCompilationInfo.last().hotPathBegin = hotPathBegin;

    DataLabelPtr structureToCompare;
    Jump structureCheck = branchPtrWithPatch(NotEqual, Address(regT0, JSCell::structureOffset()), structureToCompare, TrustedImmPtr(reinterpret_cast<void*>(patchGetByIdDefaultStructure)));
    addSlowCase(structureCheck);
    ASSERT_JIT_OFFSET(differenceBetween(hotPathBegin, structureToCompare), patchOffsetGetByIdStructure);
    ASSERT_JIT_OFFSET(differenceBetween(hotPathBegin, structureCheck), patchOffsetGetByIdBranchToSlowCase)

    loadPtr(Address(regT0, JSObject::offsetOfPropertyStorage()), regT0);
    DataLabelCompact displacementLabel = loadPtrWithCompactAddressOffsetPatch(Address(regT0, patchGetByIdDefaultOffset), regT0);
    ASSERT_JIT_OFFSET_UNUSED(displacementLabel, differenceBetween(hotPathBegin, displacementLabel), patchOffsetGetByIdPropertyMapOffset);

    Label putResult(this);

    END_UNINTERRUPTED_SEQUENCE(sequenceGetByIdHotPath);

    ASSERT_JIT_OFFSET(differenceBetween(hotPathBegin, putResult), patchOffsetGetByIdPutResult);
}

void JIT::emitSlow_op_get_by_id(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned resultVReg = currentInstruction[1].u.operand;
    unsigned baseVReg = currentInstruction[2].u.operand;
    Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand));

    compileGetByIdSlowCase(resultVReg, baseVReg, ident, iter, false);
    emitValueProfilingSite(SubsequentProfilingSite);
}

void JIT::compileGetByIdSlowCase(int resultVReg, int baseVReg, Identifier* ident, Vector<SlowCaseEntry>::iterator& iter, bool isMethodCheck)
{
    // As for the hot path of get_by_id, above, we ensure that we can use an architecture-specific offset
    // so that we only need track one pointer into the slow case code - we track a pointer to the location
    // of the call (which we can use to look up the patch information), but should an array-length or
    // prototype access trampoline fail we want to bail out back to here. To do so we can subtract back
    // the distance from the call to the head of the slow case.

    linkSlowCaseIfNotJSCell(iter, baseVReg);
    linkSlowCase(iter);

    BEGIN_UNINTERRUPTED_SEQUENCE(sequenceGetByIdSlowCase);

#ifndef NDEBUG
    Label coldPathBegin(this);
#endif
    JITStubCall stubCall(this, isMethodCheck ? cti_op_get_by_id_method_check : cti_op_get_by_id);
    stubCall.addArgument(regT0);
    stubCall.addArgument(TrustedImmPtr(ident));
    Call call = stubCall.call(resultVReg);

    END_UNINTERRUPTED_SEQUENCE(sequenceGetByIdSlowCase);

    ASSERT_JIT_OFFSET(differenceBetween(coldPathBegin, call), patchOffsetGetByIdSlowCaseCall);

    // Track the location of the call; this will be used to recover patch information.
    m_propertyAccessCompilationInfo[m_propertyAccessInstructionIndex++].callReturnLocation = call;
}

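// Fast path for o.p = v: a patchable Structure check guards a patchable
// store into the property storage; misses and Structure transitions are
// handled by the slow case and by later repatching.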
void JIT::emit_op_put_by_id(Instruction* currentInstruction)
{
    unsigned baseVReg = currentInstruction[1].u.operand;
    unsigned valueVReg = currentInstruction[3].u.operand;

    // In order to be able to patch both the Structure and the object offset, we store a single pointer,
    // 'hotPathBegin', to just after the arguments have been loaded into registers, and we generate code
    // such that the Structure & offset are always at the same distance from it.

    emitGetVirtualRegisters(baseVReg, regT0, valueVReg, regT1);

    // Jump to a slow case if either the base object is an immediate, or if the Structure does not match.
    emitJumpSlowCaseIfNotJSCell(regT0, baseVReg);

    emitWriteBarrier(regT0, regT2, WriteBarrierForPropertyAccess);

    BEGIN_UNINTERRUPTED_SEQUENCE(sequencePutById);

    Label hotPathBegin(this);
    m_propertyAccessCompilationInfo.append(PropertyStubCompilationInfo());
    m_propertyAccessCompilationInfo.last().hotPathBegin = hotPathBegin;

    // It is important that the following instruction plants a 32bit immediate, in order that it can be patched over.
    DataLabelPtr structureToCompare;
    addSlowCase(branchPtrWithPatch(NotEqual, Address(regT0, JSCell::structureOffset()), structureToCompare, TrustedImmPtr(reinterpret_cast<void*>(patchGetByIdDefaultStructure))));
    ASSERT_JIT_OFFSET(differenceBetween(hotPathBegin, structureToCompare), patchOffsetPutByIdStructure);

    loadPtr(Address(regT0, JSObject::offsetOfPropertyStorage()), regT0);
    DataLabel32 displacementLabel = storePtrWithAddressOffsetPatch(regT1, Address(regT0, patchPutByIdDefaultOffset));

    END_UNINTERRUPTED_SEQUENCE(sequencePutById);

    ASSERT_JIT_OFFSET_UNUSED(displacementLabel, differenceBetween(hotPathBegin, displacementLabel), patchOffsetPutByIdPropertyMapOffset);
}

void JIT::emitSlow_op_put_by_id(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned baseVReg = currentInstruction[1].u.operand;
    Identifier* ident = &(m_codeBlock->identifier(currentInstruction[2].u.operand));
    unsigned direct = currentInstruction[8].u.operand;

    linkSlowCaseIfNotJSCell(iter, baseVReg);
    linkSlowCase(iter);

    JITStubCall stubCall(this, direct ? cti_op_put_by_id_direct : cti_op_put_by_id);
    stubCall.addArgument(regT0);
    stubCall.addArgument(TrustedImmPtr(ident));
    stubCall.addArgument(regT1);
    Call call = stubCall.call();

    // Track the location of the call; this will be used to recover patch information.
    m_propertyAccessCompilationInfo[m_propertyAccessInstructionIndex++].callReturnLocation = call;
}

// Compile a store into an object's property storage.  May overwrite base.
void JIT::compilePutDirectOffset(RegisterID base, RegisterID value, Structure* structure, size_t cachedOffset)
{
    int offset = cachedOffset * sizeof(JSValue);
    if (structure->isUsingInlineStorage())
        offset += JSObject::offsetOfInlineStorage();
    else
        loadPtr(Address(base, JSObject::offsetOfPropertyStorage()), base);
    storePtr(value, Address(base, offset));
}

// Compile a load from an object's property storage.  May overwrite base.
void JIT::compileGetDirectOffset(RegisterID base, RegisterID result, Structure* structure, size_t cachedOffset)
{
    int offset = cachedOffset * sizeof(JSValue);
    if (structure->isUsingInlineStorage()) {
        offset += JSObject::offsetOfInlineStorage();
        loadPtr(Address(base, offset), result);
    } else {
        loadPtr(Address(base, JSObject::offsetOfPropertyStorage()), result);
        loadPtr(Address(result, offset), result);
    }
}

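// Compile a load from a known object's property storage; used when the
// object (e.g. a cached prototype) is a compile-time constant, so the slot's
// address can be computed directly.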
void JIT::compileGetDirectOffset(JSObject* base, RegisterID result, size_t cachedOffset)
{
    loadPtr(static_cast<void*>(&base->m_propertyStorage[cachedOffset]), result);
}

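// Compile a structure-transition stub for put_by_id: verify the old
// Structure (and, for non-direct puts, every Structure on the prototype
// chain), reallocate the property storage if its capacity changes, then
// store the new Structure and the value.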
void JIT::privateCompilePutByIdTransition(StructureStubInfo* stubInfo, Structure* oldStructure, Structure* newStructure, size_t cachedOffset, StructureChain* chain, ReturnAddressPtr returnAddress, bool direct)
{
    JumpList failureCases;
    // Check that regT0 is an object of the right Structure.
    failureCases.append(emitJumpIfNotJSCell(regT0));
    failureCases.append(branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), TrustedImmPtr(oldStructure)));
    testPrototype(oldStructure->storedPrototype(), failureCases);

    // For a normal (non-direct) put, every Structure on the prototype chain must also be unchanged.
    if (!direct) {
        for (WriteBarrier<Structure>* it = chain->head(); *it; ++it)
            testPrototype((*it)->storedPrototype(), failureCases);
    }

    Call callTarget;

    // Emit a call only if a storage realloc is needed.
    bool willNeedStorageRealloc = oldStructure->propertyStorageCapacity() != newStructure->propertyStorageCapacity();
    if (willNeedStorageRealloc) {
        // This trampoline is called like a JIT stub; before we can call again we need to
        // remove the return address from the stack, to prevent the stack from becoming misaligned.
        preserveReturnAddressAfterCall(regT3);

        JITStubCall stubCall(this, cti_op_put_by_id_transition_realloc);
        stubCall.skipArgument(); // base
        stubCall.skipArgument(); // ident
        stubCall.skipArgument(); // value
        stubCall.addArgument(TrustedImm32(oldStructure->propertyStorageCapacity()));
        stubCall.addArgument(TrustedImm32(newStructure->propertyStorageCapacity()));
        stubCall.call(regT0);
        emitGetJITStubArg(2, regT1);

        restoreReturnAddressBeforeReturn(regT3);
    }

    emitWriteBarrier(regT0, regT2, WriteBarrierForPropertyAccess);

    storePtr(TrustedImmPtr(newStructure), Address(regT0, JSCell::structureOffset()));
    compilePutDirectOffset(regT0, regT1, newStructure, cachedOffset);

    ret();

    ASSERT(!failureCases.empty());
    failureCases.link(this);
    restoreArgumentReferenceForTrampoline();
    Call failureCall = tailRecursiveCall();

    LinkBuffer patchBuffer(*m_globalData, this);

    patchBuffer.link(failureCall, FunctionPtr(direct ? cti_op_put_by_id_direct_fail : cti_op_put_by_id_fail));

    if (willNeedStorageRealloc) {
        ASSERT(m_calls.size() == 1);
        patchBuffer.link(m_calls[0].from, FunctionPtr(cti_op_put_by_id_transition_realloc));
    }

    stubInfo->stubRoutine = patchBuffer.finalizeCode();
    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relinkCallerToTrampoline(returnAddress, CodeLocationLabel(stubInfo->stubRoutine.code()));
}

void JIT::patchGetByIdSelf(CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, size_t cachedOffset, ReturnAddressPtr returnAddress)
{
    RepatchBuffer repatchBuffer(codeBlock);

    // We don't want to patch more than once - in future go to cti_op_get_by_id_generic.
    // Should probably go to cti_op_get_by_id_fail, but that doesn't do anything interesting right now.
    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_self_fail));

    int offset = sizeof(JSValue) * cachedOffset;

    // Patch the offset into the property map to load from, then patch the Structure to look for.
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelPtrAtOffset(patchOffsetGetByIdStructure), structure);
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelCompactAtOffset(patchOffsetGetByIdPropertyMapOffset), offset);
}

void JIT::patchPutByIdReplace(CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, size_t cachedOffset, ReturnAddressPtr returnAddress, bool direct)
{
    RepatchBuffer repatchBuffer(codeBlock);

    // We don't want to patch more than once - in future go to cti_op_put_by_id_generic.
    // Should probably go to cti_op_put_by_id_fail, but that doesn't do anything interesting right now.
    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(direct ? cti_op_put_by_id_direct_generic : cti_op_put_by_id_generic));

    int offset = sizeof(JSValue) * cachedOffset;

    // Patch the offset into the property map to load from, then patch the Structure to look for.
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelPtrAtOffset(patchOffsetPutByIdStructure), structure);
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabel32AtOffset(patchOffsetPutByIdPropertyMapOffset), offset);
}

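// Compile a one-off stub for array.length accesses: check the base is a
// JSArray, load m_length from the storage, and box it as an int32 immediate;
// failures jump back to the original get_by_id slow case.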
void JIT::privateCompilePatchGetArrayLength(ReturnAddressPtr returnAddress)
{
    StructureStubInfo* stubInfo = &m_codeBlock->getStubInfo(returnAddress);

    // Check that regT0 is an array.
    Jump failureCases1 = branchPtr(NotEqual, Address(regT0), TrustedImmPtr(m_globalData->jsArrayVPtr));

    // Checks out okay! - get the length from the storage.
    loadPtr(Address(regT0, JSArray::storageOffset()), regT3);
    load32(Address(regT3, OBJECT_OFFSETOF(ArrayStorage, m_length)), regT2);
    // A length that doesn't fit in a positive int32 can't be boxed as an immediate.
    Jump failureCases2 = branch32(LessThan, regT2, TrustedImm32(0));

    emitFastArithIntToImmNoCheck(regT2, regT0);
    Jump success = jump();

    LinkBuffer patchBuffer(*m_globalData, this);

    // Use the patch information to link the failure cases back to the original slow case routine.
    CodeLocationLabel slowCaseBegin = stubInfo->callReturnLocation.labelAtOffset(-patchOffsetGetByIdSlowCaseCall);
    patchBuffer.link(failureCases1, slowCaseBegin);
    patchBuffer.link(failureCases2, slowCaseBegin);

    // On success return back to the hot path code, at a point where it will perform the store to dst for us.
    patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(patchOffsetGetByIdPutResult));

    // Track the stub we have created so that it will be deleted later.
    stubInfo->stubRoutine = patchBuffer.finalizeCode();

    // Finally patch the jump to slow case back in the hot path to jump here instead.
    CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(patchOffsetGetByIdBranchToSlowCase);
    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubInfo->stubRoutine.code()));

    // We don't want to patch more than once - in future go to cti_op_get_by_id_generic.
    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_array_fail));
}

void JIT::privateCompileGetByIdProto(StructureStubInfo* stubInfo, Structure* structure, Structure* prototypeStructure, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset, ReturnAddressPtr returnAddress, CallFrame* callFrame)
{
    // The prototype object definitely exists (if this stub exists the CodeBlock is referencing a Structure that is
    // referencing the prototype object - let's speculatively load its table nice and early!)
    JSObject* protoObject = asObject(structure->prototypeForLookup(callFrame));

    // Check that regT0 is an object of the right Structure.
    Jump failureCases1 = checkStructure(regT0, structure);

    // Check that the prototype object's Structure has not changed.
    move(TrustedImmPtr(protoObject), regT3);
    Jump failureCases2 = branchPtr(NotEqual, Address(regT3, JSCell::structureOffset()), TrustedImmPtr(prototypeStructure));

    bool needsStubLink = false;

    // Checks out okay!
    if (slot.cachedPropertyType() == PropertySlot::Getter) {
        needsStubLink = true;
        compileGetDirectOffset(protoObject, regT1, cachedOffset);
        JITStubCall stubCall(this, cti_op_get_by_id_getter_stub);
        stubCall.addArgument(regT1);
        stubCall.addArgument(regT0);
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else if (slot.cachedPropertyType() == PropertySlot::Custom) {
        needsStubLink = true;
        JITStubCall stubCall(this, cti_op_get_by_id_custom_stub);
        stubCall.addArgument(TrustedImmPtr(protoObject));
        stubCall.addArgument(TrustedImmPtr(FunctionPtr(slot.customGetter()).executableAddress()));
        stubCall.addArgument(TrustedImmPtr(const_cast<Identifier*>(&ident)));
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else
        compileGetDirectOffset(protoObject, regT0, cachedOffset);
    Jump success = jump();
    LinkBuffer patchBuffer(*m_globalData, this);

    // Use the patch information to link the failure cases back to the original slow case routine.
    CodeLocationLabel slowCaseBegin = stubInfo->callReturnLocation.labelAtOffset(-patchOffsetGetByIdSlowCaseCall);
    patchBuffer.link(failureCases1, slowCaseBegin);
    patchBuffer.link(failureCases2, slowCaseBegin);

    // On success return back to the hot path code, at a point where it will perform the store to dst for us.
    patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(patchOffsetGetByIdPutResult));

    if (needsStubLink) {
        for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
            if (iter->to)
                patchBuffer.link(iter->from, FunctionPtr(iter->to));
        }
    }
    // Track the stub we have created so that it will be deleted later.
    stubInfo->stubRoutine = patchBuffer.finalizeCode();

    // Finally patch the jump to slow case back in the hot path to jump here instead.
    CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(patchOffsetGetByIdBranchToSlowCase);
    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubInfo->stubRoutine.code()));

    // We don't want to patch more than once - in future go to cti_op_get_by_id_generic.
    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_proto_list));
}

void JIT::privateCompileGetByIdSelfList(StructureStubInfo* stubInfo, PolymorphicAccessStructureList* polymorphicStructures, int currentIndex, Structure* structure, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset)
{
    Jump failureCase = checkStructure(regT0, structure);
    bool needsStubLink = false;
    if (slot.cachedPropertyType() == PropertySlot::Getter) {
        needsStubLink = true;
        compileGetDirectOffset(regT0, regT1, structure, cachedOffset);
        JITStubCall stubCall(this, cti_op_get_by_id_getter_stub);
        stubCall.addArgument(regT1);
        stubCall.addArgument(regT0);
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else if (slot.cachedPropertyType() == PropertySlot::Custom) {
        needsStubLink = true;
        JITStubCall stubCall(this, cti_op_get_by_id_custom_stub);
        stubCall.addArgument(regT0);
        stubCall.addArgument(TrustedImmPtr(FunctionPtr(slot.customGetter()).executableAddress()));
        stubCall.addArgument(TrustedImmPtr(const_cast<Identifier*>(&ident)));
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else
        compileGetDirectOffset(regT0, regT0, structure, cachedOffset);
    Jump success = jump();

    LinkBuffer patchBuffer(*m_globalData, this);

    if (needsStubLink) {
        for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
            if (iter->to)
                patchBuffer.link(iter->from, FunctionPtr(iter->to));
        }
    }

    // Use the patch information to link the failure cases back to the original slow case routine.
    CodeLocationLabel lastProtoBegin = CodeLocationLabel(polymorphicStructures->list[currentIndex - 1].stubRoutine.code());
    if (!lastProtoBegin)
        lastProtoBegin = stubInfo->callReturnLocation.labelAtOffset(-patchOffsetGetByIdSlowCaseCall);

    patchBuffer.link(failureCase, lastProtoBegin);

    // On success return back to the hot path code, at a point where it will perform the store to dst for us.
    patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(patchOffsetGetByIdPutResult));

    MacroAssemblerCodeRef stubCode = patchBuffer.finalizeCode();

    polymorphicStructures->list[currentIndex].set(*m_globalData, m_codeBlock->ownerExecutable(), stubCode, structure);

    // Finally patch the jump to slow case back in the hot path to jump here instead.
    CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(patchOffsetGetByIdBranchToSlowCase);
    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubCode.code()));
}

void JIT::privateCompileGetByIdProtoList(StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructures, int currentIndex, Structure* structure, Structure* prototypeStructure, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset, CallFrame* callFrame)
{
    // The prototype object definitely exists (if this stub exists the CodeBlock is referencing a Structure that is
    // referencing the prototype object - let's speculatively load its table nice and early!)
    JSObject* protoObject = asObject(structure->prototypeForLookup(callFrame));

    // Check that regT0 is an object of the right Structure.
    Jump failureCases1 = checkStructure(regT0, structure);

    // Check that the prototype object's Structure has not changed.
    move(TrustedImmPtr(protoObject), regT3);
    Jump failureCases2 = branchPtr(NotEqual, Address(regT3, JSCell::structureOffset()), TrustedImmPtr(prototypeStructure));

    // Checks out okay!
    bool needsStubLink = false;
    if (slot.cachedPropertyType() == PropertySlot::Getter) {
        needsStubLink = true;
        compileGetDirectOffset(protoObject, regT1, cachedOffset);
        JITStubCall stubCall(this, cti_op_get_by_id_getter_stub);
        stubCall.addArgument(regT1);
        stubCall.addArgument(regT0);
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else if (slot.cachedPropertyType() == PropertySlot::Custom) {
        needsStubLink = true;
        JITStubCall stubCall(this, cti_op_get_by_id_custom_stub);
        stubCall.addArgument(TrustedImmPtr(protoObject));
        stubCall.addArgument(TrustedImmPtr(FunctionPtr(slot.customGetter()).executableAddress()));
        stubCall.addArgument(TrustedImmPtr(const_cast<Identifier*>(&ident)));
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else
        compileGetDirectOffset(protoObject, regT0, cachedOffset);

    Jump success = jump();

    LinkBuffer patchBuffer(*m_globalData, this);

    if (needsStubLink) {
        for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
            if (iter->to)
                patchBuffer.link(iter->from, FunctionPtr(iter->to));
        }
    }

    // Use the patch information to link the failure cases back to the original slow case routine.
    CodeLocationLabel lastProtoBegin = CodeLocationLabel(prototypeStructures->list[currentIndex - 1].stubRoutine.code());
    patchBuffer.link(failureCases1, lastProtoBegin);
    patchBuffer.link(failureCases2, lastProtoBegin);

    // On success return back to the hot path code, at a point where it will perform the store to dst for us.
    patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(patchOffsetGetByIdPutResult));

    MacroAssemblerCodeRef stubCode = patchBuffer.finalizeCode();
    prototypeStructures->list[currentIndex].set(*m_globalData, m_codeBlock->ownerExecutable(), stubCode, structure, prototypeStructure);

    // Finally patch the jump to slow case back in the hot path to jump here instead.
    CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(patchOffsetGetByIdBranchToSlowCase);
    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubCode.code()));
}

void JIT::privateCompileGetByIdChainList(StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructures, int currentIndex, Structure* structure, StructureChain* chain, size_t count, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset, CallFrame* callFrame)
{
    ASSERT(count);
    JumpList bucketsOfFail;

    // Check that regT0 is an object of the right Structure.
    Jump baseObjectCheck = checkStructure(regT0, structure);
    bucketsOfFail.append(baseObjectCheck);

    Structure* currStructure = structure;
    WriteBarrier<Structure>* it = chain->head();
    JSObject* protoObject = 0;
    for (unsigned i = 0; i < count; ++i, ++it) {
        protoObject = asObject(currStructure->prototypeForLookup(callFrame));
        currStructure = it->get();
        testPrototype(protoObject, bucketsOfFail);
    }
    ASSERT(protoObject);

    bool needsStubLink = false;
    if (slot.cachedPropertyType() == PropertySlot::Getter) {
        needsStubLink = true;
        compileGetDirectOffset(protoObject, regT1, cachedOffset);
        JITStubCall stubCall(this, cti_op_get_by_id_getter_stub);
        stubCall.addArgument(regT1);
        stubCall.addArgument(regT0);
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else if (slot.cachedPropertyType() == PropertySlot::Custom) {
        needsStubLink = true;
        JITStubCall stubCall(this, cti_op_get_by_id_custom_stub);
        stubCall.addArgument(TrustedImmPtr(protoObject));
        stubCall.addArgument(TrustedImmPtr(FunctionPtr(slot.customGetter()).executableAddress()));
        stubCall.addArgument(TrustedImmPtr(const_cast<Identifier*>(&ident)));
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else
        compileGetDirectOffset(protoObject, regT0, cachedOffset);
    Jump success = jump();

    LinkBuffer patchBuffer(*m_globalData, this);

    if (needsStubLink) {
        for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
            if (iter->to)
                patchBuffer.link(iter->from, FunctionPtr(iter->to));
        }
    }

    // Use the patch information to link the failure cases back to the original slow case routine.
    CodeLocationLabel lastProtoBegin = CodeLocationLabel(prototypeStructures->list[currentIndex - 1].stubRoutine.code());

    patchBuffer.link(bucketsOfFail, lastProtoBegin);

    // On success return back to the hot path code, at a point where it will perform the store to dst for us.
    patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(patchOffsetGetByIdPutResult));

    CodeRef stubRoutine = patchBuffer.finalizeCode();

    // Track the stub we have created so that it will be deleted later.
    prototypeStructures->list[currentIndex].set(callFrame->globalData(), m_codeBlock->ownerExecutable(), stubRoutine, structure, chain);

    // Finally patch the jump to slow case back in the hot path to jump here instead.
    CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(patchOffsetGetByIdBranchToSlowCase);
    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubRoutine.code()));
}

void JIT::privateCompileGetByIdChain(StructureStubInfo* stubInfo, Structure* structure, StructureChain* chain, size_t count, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset, ReturnAddressPtr returnAddress, CallFrame* callFrame)
{
    ASSERT(count);

    JumpList bucketsOfFail;

    // Check that regT0 is an object of the right Structure.
    bucketsOfFail.append(checkStructure(regT0, structure));

    Structure* currStructure = structure;
    WriteBarrier<Structure>* it = chain->head();
    JSObject* protoObject = 0;
    for (unsigned i = 0; i < count; ++i, ++it) {
        protoObject = asObject(currStructure->prototypeForLookup(callFrame));
        currStructure = it->get();
        testPrototype(protoObject, bucketsOfFail);
    }
    ASSERT(protoObject);

    bool needsStubLink = false;
    if (slot.cachedPropertyType() == PropertySlot::Getter) {
        needsStubLink = true;
        compileGetDirectOffset(protoObject, regT1, cachedOffset);
        JITStubCall stubCall(this, cti_op_get_by_id_getter_stub);
        stubCall.addArgument(regT1);
        stubCall.addArgument(regT0);
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else if (slot.cachedPropertyType() == PropertySlot::Custom) {
        needsStubLink = true;
        JITStubCall stubCall(this, cti_op_get_by_id_custom_stub);
        stubCall.addArgument(TrustedImmPtr(protoObject));
        stubCall.addArgument(TrustedImmPtr(FunctionPtr(slot.customGetter()).executableAddress()));
        stubCall.addArgument(TrustedImmPtr(const_cast<Identifier*>(&ident)));
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else
        compileGetDirectOffset(protoObject, regT0, cachedOffset);
    Jump success = jump();

    LinkBuffer patchBuffer(*m_globalData, this);

    if (needsStubLink) {
        for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
            if (iter->to)
                patchBuffer.link(iter->from, FunctionPtr(iter->to));
        }
    }

    // Use the patch information to link the failure cases back to the original slow case routine.
    patchBuffer.link(bucketsOfFail, stubInfo->callReturnLocation.labelAtOffset(-patchOffsetGetByIdSlowCaseCall));

    // On success return back to the hot path code, at a point where it will perform the store to dst for us.
    patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(patchOffsetGetByIdPutResult));

    // Track the stub we have created so that it will be deleted later.
    CodeRef stubRoutine = patchBuffer.finalizeCode();
    stubInfo->stubRoutine = stubRoutine;

    // Finally patch the jump to slow case back in the hot path to jump here instead.
    CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(patchOffsetGetByIdBranchToSlowCase);
    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubRoutine.code()));

    // We don't want to patch more than once - in future go to cti_op_get_by_id_generic.
    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_proto_list));
}

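// Scoped variable access: walk 'skip' links up the scope chain (skipping
// over a function's activation if it has not yet been created at the top
// level), then access the variable through the resulting node's variable
// object.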
void JIT::emit_op_get_scoped_var(Instruction* currentInstruction)
{
    int skip = currentInstruction[3].u.operand;

    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT0);
    bool checkTopLevel = m_codeBlock->codeType() == FunctionCode && m_codeBlock->needsFullScopeChain();
    ASSERT(skip || !checkTopLevel);
    if (checkTopLevel && skip--) {
        Jump activationNotCreated;
        if (checkTopLevel)
            activationNotCreated = branchTestPtr(Zero, addressFor(m_codeBlock->activationRegister()));
        loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, next)), regT0);
        activationNotCreated.link(this);
    }
    while (skip--)
        loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, next)), regT0);

    loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, object)), regT0);
    loadPtr(Address(regT0, JSVariableObject::offsetOfRegisters()), regT0);
    loadPtr(Address(regT0, currentInstruction[2].u.operand * sizeof(Register)), regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_put_scoped_var(Instruction* currentInstruction)
{
    int skip = currentInstruction[2].u.operand;

    emitGetVirtualRegister(currentInstruction[3].u.operand, regT0);

    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1);
    bool checkTopLevel = m_codeBlock->codeType() == FunctionCode && m_codeBlock->needsFullScopeChain();
    ASSERT(skip || !checkTopLevel);
    if (checkTopLevel && skip--) {
        Jump activationNotCreated;
        if (checkTopLevel)
            activationNotCreated = branchTestPtr(Zero, addressFor(m_codeBlock->activationRegister()));
        loadPtr(Address(regT1, OBJECT_OFFSETOF(ScopeChainNode, next)), regT1);
        activationNotCreated.link(this);
    }
    while (skip--)
        loadPtr(Address(regT1, OBJECT_OFFSETOF(ScopeChainNode, next)), regT1);
    loadPtr(Address(regT1, OBJECT_OFFSETOF(ScopeChainNode, object)), regT1);

    emitWriteBarrier(regT1, regT2, WriteBarrierForVariableAccess);

    loadPtr(Address(regT1, JSVariableObject::offsetOfRegisters()), regT1);
    storePtr(regT0, Address(regT1, currentInstruction[1].u.operand * sizeof(Register)));
}

void JIT::emit_op_get_global_var(Instruction* currentInstruction)
{
    JSVariableObject* globalObject = m_codeBlock->globalObject();
    loadPtr(&globalObject->m_registers, regT0);
    loadPtr(Address(regT0, currentInstruction[2].u.operand * sizeof(Register)), regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_put_global_var(Instruction* currentInstruction)
{
    JSGlobalObject* globalObject = m_codeBlock->globalObject();

    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
    move(TrustedImmPtr(globalObject), regT1);

    emitWriteBarrier(regT1, regT2, WriteBarrierForVariableAccess);

    loadPtr(Address(regT1, JSVariableObject::offsetOfRegisters()), regT1);
    storePtr(regT0, Address(regT1, currentInstruction[1].u.operand * sizeof(Register)));
}

void JIT::emitWriteBarrier(RegisterID owner, RegisterID scratch, WriteBarrierUseKind useKind)
{
    UNUSED_PARAM(owner);
    UNUSED_PARAM(scratch);
    UNUSED_PARAM(useKind);
    ASSERT(owner != scratch);

#if ENABLE(WRITE_BARRIER_PROFILING)
    emitCount(WriteBarrierCounters::jitCounterFor(useKind));
#endif
}

#endif // USE(JSVALUE64)

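// Emit a check that the given prototype's Structure is unchanged; a null
// prototype terminates the chain and needs no check.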
void JIT::testPrototype(JSValue prototype, JumpList& failureCases)
{
    if (prototype.isNull())
        return;

    ASSERT(prototype.isCell());
    move(TrustedImmPtr(prototype.asCell()), regT3);
    failureCases.append(branchPtr(NotEqual, Address(regT3, JSCell::structureOffset()), TrustedImmPtr(prototype.asCell()->structure())));
}

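// Fill in a method_check's inline cache: record the Structure, the
// prototype and its Structure, and the callee function into the patchable
// constants planted by emit_op_method_check, then route the slow case back
// to plain cti_op_get_by_id.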
void JIT::patchMethodCallProto(JSGlobalData& globalData, CodeBlock* codeBlock, MethodCallLinkInfo& methodCallLinkInfo, JSObject* callee, Structure* structure, JSObject* proto, ReturnAddressPtr returnAddress)
{
    RepatchBuffer repatchBuffer(codeBlock);

    ASSERT(!methodCallLinkInfo.cachedStructure);
    CodeLocationDataLabelPtr structureLocation = methodCallLinkInfo.cachedStructure.location();
    methodCallLinkInfo.cachedStructure.set(globalData, structureLocation, codeBlock->ownerExecutable(), structure);

    Structure* prototypeStructure = proto->structure();
    methodCallLinkInfo.cachedPrototypeStructure.set(globalData, structureLocation.dataLabelPtrAtOffset(patchOffsetMethodCheckProtoStruct), codeBlock->ownerExecutable(), prototypeStructure);
    methodCallLinkInfo.cachedPrototype.set(globalData, structureLocation.dataLabelPtrAtOffset(patchOffsetMethodCheckProtoObj), codeBlock->ownerExecutable(), proto);
    methodCallLinkInfo.cachedFunction.set(globalData, structureLocation.dataLabelPtrAtOffset(patchOffsetMethodCheckPutFunction), codeBlock->ownerExecutable(), callee);
    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id));
}

} // namespace JSC

#endif // ENABLE(JIT)