2 * Copyright (C) 2011 Apple Inc. All rights reserved.
4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions
7 * 1. Redistributions of source code must retain the above copyright
8 * notice, this list of conditions and the following disclaimer.
9 * 2. Redistributions in binary form must reproduce the above copyright
10 * notice, this list of conditions and the following disclaimer in the
11 * documentation and/or other materials provided with the distribution.
13 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
17 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 #include "DFGJITCodeGenerator.h"
31 #include "DFGNonSpeculativeJIT.h"
32 #include "DFGSpeculativeJIT.h"
33 #include "LinkBuffer.h"
35 namespace JSC { namespace DFG {
// Resets all per-virtual-register generation state: every GenerationInfo is
// replaced with a default-constructed one, and both register banks (GPR and
// FPR) are returned to their freshly-constructed (empty) state.
37 void JITCodeGenerator::clearGenerationInfo()
39 for (unsigned i = 0; i < m_generationInfo.size(); ++i)
40 m_generationInfo[i] = GenerationInfo();
41 m_gprs = RegisterBank<GPRInfo>();
42 m_fprs = RegisterBank<FPRInfo>();
// Ensures the value produced by nodeIndex is live in a GPR in an integer
// format, filling it from a constant or from its spill slot if it is not
// already in a register. Reports the resulting format through returnFormat
// (DataFormatInteger for an unboxed int32, DataFormatJSInteger for a boxed
// one). NOTE(review): the embedded original line numbers are non-contiguous;
// returns/braces between the cases below are elided in this extraction.
45 GPRReg JITCodeGenerator::fillInteger(NodeIndex nodeIndex, DataFormat& returnFormat)
47 Node& node = m_jit.graph()[nodeIndex];
48 VirtualRegister virtualRegister = node.virtualRegister();
49 GenerationInfo& info = m_generationInfo[virtualRegister];
// Not currently in any register: allocate a GPR and fill it.
51 if (info.registerFormat() == DataFormatNone) {
52 GPRReg gpr = allocate();
54 if (node.isConstant()) {
55 m_gprs.retain(gpr, virtualRegister, SpillOrderConstant);
// Int32 constant: materialize it unboxed.
56 if (isInt32Constant(nodeIndex)) {
57 m_jit.move(MacroAssembler::Imm32(valueOfInt32Constant(nodeIndex)), gpr);
58 info.fillInteger(gpr);
59 returnFormat = DataFormatInteger;
// Other numeric constant: materialize the boxed JSValue encoding.
62 if (isNumberConstant(nodeIndex)) {
63 JSValue jsValue = jsNumber(valueOfNumberConstant(nodeIndex));
64 m_jit.move(MacroAssembler::ImmPtr(JSValue::encode(jsValue)), gpr);
// Otherwise it must be some other JS constant; load its boxed encoding.
66 ASSERT(isJSConstant(nodeIndex));
67 JSValue jsValue = valueOfJSConstant(nodeIndex);
68 m_jit.move(MacroAssembler::ImmPtr(JSValue::encode(jsValue)), gpr);
// Non-constant: reload the boxed value from its stack (spill) slot.
71 ASSERT(info.spillFormat() == DataFormatJS || info.spillFormat() == DataFormatJSInteger);
72 m_gprs.retain(gpr, virtualRegister, SpillOrderSpilled);
73 m_jit.loadPtr(JITCompiler::addressFor(virtualRegister), gpr);
76 // Since we statically know that we're filling an integer, and values
77 // in the RegisterFile are boxed, this must be DataFormatJSInteger.
78 // We will check this with a jitAssert below.
79 info.fillJSValue(gpr, DataFormatJSInteger);
83 switch (info.registerFormat()) {
85 // Should have filled, above.
86 case DataFormatJSDouble:
87 case DataFormatDouble:
90 case DataFormatJSCell:
91 case DataFormatBoolean:
92 case DataFormatJSBoolean:
93 // Should only be calling this function if we know this operand to be integer.
// Already in a register as a boxed int32: debug-assert the tag and report it.
96 case DataFormatJSInteger: {
97 GPRReg gpr = info.gpr();
99 m_jit.jitAssertIsJSInt32(gpr);
100 returnFormat = DataFormatJSInteger;
// Already in a register unboxed.
104 case DataFormatInteger: {
105 GPRReg gpr = info.gpr();
107 m_jit.jitAssertIsInt32(gpr);
108 returnFormat = DataFormatInteger;
113 ASSERT_NOT_REACHED();
114 return InvalidGPRReg;
// Ensures the value produced by nodeIndex is live in an FPR as a double,
// converting from integer or boxed-JSValue representations as necessary.
// NOTE(review): embedded original line numbers are non-contiguous; some
// returns/braces between cases are elided in this extraction.
117 FPRReg JITCodeGenerator::fillDouble(NodeIndex nodeIndex)
119 Node& node = m_jit.graph()[nodeIndex];
120 VirtualRegister virtualRegister = node.virtualRegister();
121 GenerationInfo& info = m_generationInfo[virtualRegister];
// Not currently in any register: fill from constant or spill slot.
123 if (info.registerFormat() == DataFormatNone) {
124 GPRReg gpr = allocate();
126 if (node.isConstant()) {
127 if (isInt32Constant(nodeIndex)) {
128 // FIXME: should not be reachable?
129 m_jit.move(MacroAssembler::Imm32(valueOfInt32Constant(nodeIndex)), gpr);
130 m_gprs.retain(gpr, virtualRegister, SpillOrderConstant);
131 info.fillInteger(gpr);
// Number constant: move its raw bit pattern via a GPR into an FPR.
133 } else if (isNumberConstant(nodeIndex)) {
134 FPRReg fpr = fprAllocate();
135 m_jit.move(MacroAssembler::ImmPtr(reinterpret_cast<void*>(reinterpretDoubleToIntptr(valueOfNumberConstant(nodeIndex)))), gpr);
136 m_jit.movePtrToDouble(gpr, fpr);
139 m_fprs.retain(fpr, virtualRegister, SpillOrderDouble);
140 info.fillDouble(fpr);
143 // FIXME: should not be reachable?
144 ASSERT(isJSConstant(nodeIndex));
145 JSValue jsValue = valueOfJSConstant(nodeIndex);
146 m_jit.move(MacroAssembler::ImmPtr(JSValue::encode(jsValue)), gpr);
147 m_gprs.retain(gpr, virtualRegister, SpillOrderConstant);
148 info.fillJSValue(gpr, DataFormatJS);
// Non-constant: reload the boxed value from its spill slot. In speculative
// mode the (more precise) spill format is preserved.
152 DataFormat spillFormat = info.spillFormat();
153 ASSERT(spillFormat & DataFormatJS);
154 m_gprs.retain(gpr, virtualRegister, SpillOrderSpilled);
155 m_jit.loadPtr(JITCompiler::addressFor(virtualRegister), gpr);
156 info.fillJSValue(gpr, m_isSpeculative ? spillFormat : DataFormatJS);
161 switch (info.registerFormat()) {
163 // Should have filled, above.
165 case DataFormatJSCell:
166 case DataFormatBoolean:
167 case DataFormatJSBoolean:
168 // Should only be calling this function if we know this operand to be numeric.
169 ASSERT_NOT_REACHED();
// Boxed JSValue of unknown numeric kind: branch on the tag to either unbox
// a double or convert an int32, then transfer ownership to the FPR bank.
172 GPRReg jsValueGpr = info.gpr();
173 m_gprs.lock(jsValueGpr);
174 FPRReg fpr = fprAllocate();
175 GPRReg tempGpr = allocate(); // FIXME: can we skip this allocation on the last use of the virtual register?
177 JITCompiler::Jump isInteger = m_jit.branchPtr(MacroAssembler::AboveOrEqual, jsValueGpr, GPRInfo::tagTypeNumberRegister);
179 m_jit.jitAssertIsJSDouble(jsValueGpr);
181 // First, if we get here we have a double encoded as a JSValue
182 m_jit.move(jsValueGpr, tempGpr);
183 unboxDouble(tempGpr, fpr);
184 JITCompiler::Jump hasUnboxedDouble = m_jit.jump();
186 // Finally, handle integers.
187 isInteger.link(&m_jit);
188 m_jit.convertInt32ToDouble(jsValueGpr, fpr);
189 hasUnboxedDouble.link(&m_jit);
191 m_gprs.release(jsValueGpr);
192 m_gprs.unlock(jsValueGpr);
193 m_gprs.unlock(tempGpr);
194 m_fprs.retain(fpr, virtualRegister, SpillOrderDouble);
195 info.fillDouble(fpr);
// Integer already in a GPR (boxed or not): convert in place to a new FPR.
200 case DataFormatJSInteger:
201 case DataFormatInteger: {
202 FPRReg fpr = fprAllocate();
203 GPRReg gpr = info.gpr();
205 m_jit.convertInt32ToDouble(gpr, fpr);
// Boxed double in a GPR: unbox into an FPR, via a temp if the GPR is locked.
211 case DataFormatJSDouble: {
212 GPRReg gpr = info.gpr();
213 FPRReg fpr = fprAllocate();
214 if (m_gprs.isLocked(gpr)) {
215 // Make sure we don't trample gpr if it is in use.
216 GPRReg temp = allocate();
217 m_jit.move(gpr, temp);
218 unboxDouble(temp, fpr);
221 unboxDouble(gpr, fpr);
224 m_fprs.retain(fpr, virtualRegister, SpillOrderDouble);
226 info.fillDouble(fpr);
// Already an unboxed double in an FPR: nothing to do.
230 case DataFormatDouble: {
231 FPRReg fpr = info.fpr();
237 ASSERT_NOT_REACHED();
238 return InvalidFPRReg;
// Ensures the value produced by nodeIndex is live in a GPR as a fully boxed
// JSValue, re-tagging unboxed integers and boxing unboxed doubles as needed.
// NOTE(review): embedded original line numbers are non-contiguous; some
// returns/braces between cases are elided in this extraction.
241 GPRReg JITCodeGenerator::fillJSValue(NodeIndex nodeIndex)
243 Node& node = m_jit.graph()[nodeIndex];
244 VirtualRegister virtualRegister = node.virtualRegister();
245 GenerationInfo& info = m_generationInfo[virtualRegister];
247 switch (info.registerFormat()) {
// Not in a register: materialize a boxed constant or reload the spill slot.
248 case DataFormatNone: {
249 GPRReg gpr = allocate();
251 if (node.isConstant()) {
252 if (isInt32Constant(nodeIndex)) {
253 info.fillJSValue(gpr, DataFormatJSInteger);
254 JSValue jsValue = jsNumber(valueOfInt32Constant(nodeIndex));
255 m_jit.move(MacroAssembler::ImmPtr(JSValue::encode(jsValue)), gpr);
256 } else if (isNumberConstant(nodeIndex)) {
257 info.fillJSValue(gpr, DataFormatJSDouble);
258 JSValue jsValue(JSValue::EncodeAsDouble, valueOfNumberConstant(nodeIndex));
259 m_jit.move(MacroAssembler::ImmPtr(JSValue::encode(jsValue)), gpr);
261 ASSERT(isJSConstant(nodeIndex));
262 JSValue jsValue = valueOfJSConstant(nodeIndex);
263 m_jit.move(MacroAssembler::ImmPtr(JSValue::encode(jsValue)), gpr);
264 info.fillJSValue(gpr, DataFormatJS);
267 m_gprs.retain(gpr, virtualRegister, SpillOrderConstant);
// Spilled values are always stored boxed; in speculative mode the more
// precise spill format is preserved.
269 DataFormat spillFormat = info.spillFormat();
270 ASSERT(spillFormat & DataFormatJS);
271 m_gprs.retain(gpr, virtualRegister, SpillOrderSpilled);
272 m_jit.loadPtr(JITCompiler::addressFor(virtualRegister), gpr);
273 info.fillJSValue(gpr, m_isSpeculative ? spillFormat : DataFormatJS);
// Unboxed int32: re-tag by OR-ing in the number tag, copying first if the
// register is locked by another use.
278 case DataFormatInteger: {
279 GPRReg gpr = info.gpr();
280 // If the register has already been locked we need to take a copy.
281 // If not, we'll zero extend in place, so mark on the info that this is now type DataFormatInteger, not DataFormatJSInteger.
282 if (m_gprs.isLocked(gpr)) {
283 GPRReg result = allocate();
284 m_jit.orPtr(GPRInfo::tagTypeNumberRegister, gpr, result);
288 m_jit.orPtr(GPRInfo::tagTypeNumberRegister, gpr);
289 info.fillJSValue(gpr, DataFormatJSInteger);
// Unboxed double: box it into a GPR and transfer ownership to the GPR bank.
293 case DataFormatDouble: {
294 FPRReg fpr = info.fpr();
295 GPRReg gpr = boxDouble(fpr);
298 info.fillJSValue(gpr, DataFormatJSDouble);
300 m_gprs.retain(gpr, virtualRegister, SpillOrderJS);
306 // No retag required on JSVALUE64!
308 case DataFormatJSInteger:
309 case DataFormatJSDouble:
310 case DataFormatJSCell:
311 case DataFormatJSBoolean: {
312 GPRReg gpr = info.gpr();
317 case DataFormatBoolean:
318 // this type currently never occurs
319 ASSERT_NOT_REACHED();
322 ASSERT_NOT_REACHED();
323 return InvalidGPRReg;
// Calls use() on each child of the given node, walking m_varArgChildren for
// var-args nodes and the fixed child1/child2/child3 slots otherwise, stopping
// at the first NoNode (with debug asserts that later slots are also empty).
326 void JITCodeGenerator::useChildren(Node& node)
328 if (node.op & NodeHasVarArgs) {
329 for (unsigned childIdx = node.firstChild(); childIdx < node.firstChild() + node.numChildren(); childIdx++)
330 use(m_jit.graph().m_varArgChildren[childIdx]);
332 NodeIndex child1 = node.child1();
333 if (child1 == NoNode) {
334 ASSERT(node.child2() == NoNode && node.child3() == NoNode);
339 NodeIndex child2 = node.child2();
340 if (child2 == NoNode) {
341 ASSERT(node.child3() == NoNode);
346 NodeIndex child3 = node.child3();
347 if (child3 == NoNode)
// Returns whether nodeIndex is known to hold a strict (unboxed) int32:
// either an int32 constant, or a value currently in a register in
// DataFormatInteger.
353 bool JITCodeGenerator::isStrictInt32(NodeIndex nodeIndex)
355 if (isInt32Constant(nodeIndex))
358 Node& node = m_jit.graph()[nodeIndex];
359 GenerationInfo& info = m_generationInfo[node.virtualRegister()];
361 return info.registerFormat() == DataFormatInteger;
// Returns whether nodeIndex is statically known to be an integer: an int32
// constant, a node whose result type is int32, or a value whose current
// generation info says it is a JS integer.
364 bool JITCodeGenerator::isKnownInteger(NodeIndex nodeIndex)
366 if (isInt32Constant(nodeIndex))
369 Node& node = m_jit.graph()[nodeIndex];
371 if (node.hasInt32Result())
374 GenerationInfo& info = m_generationInfo[node.virtualRegister()];
376 return info.isJSInteger();
// Returns whether nodeIndex is statically known to be numeric (integer or
// double): a numeric constant, a node with a number result type, or a value
// whose generation info is JS-integer or JS-double.
379 bool JITCodeGenerator::isKnownNumeric(NodeIndex nodeIndex)
381 if (isInt32Constant(nodeIndex) || isNumberConstant(nodeIndex))
384 Node& node = m_jit.graph()[nodeIndex];
386 if (node.hasNumberResult())
389 GenerationInfo& info = m_generationInfo[node.virtualRegister()];
391 return info.isJSInteger() || info.isJSDouble();
// Returns whether the current generation info for nodeIndex says the value
// is a JS cell (i.e. a heap object rather than an immediate).
394 bool JITCodeGenerator::isKnownCell(NodeIndex nodeIndex)
396 return m_generationInfo[m_jit.graph()[nodeIndex].virtualRegister()].isJSCell();
// Returns whether nodeIndex is statically known NOT to be an int32: its
// generation info says double/cell/boolean, or it is a constant whose
// JSValue is not an int32.
399 bool JITCodeGenerator::isKnownNotInteger(NodeIndex nodeIndex)
401 Node& node = m_jit.graph()[nodeIndex];
402 VirtualRegister virtualRegister = node.virtualRegister();
403 GenerationInfo& info = m_generationInfo[virtualRegister];
405 return info.isJSDouble() || info.isJSCell() || info.isJSBoolean()
406 || (node.isConstant() && !valueOfJSConstant(nodeIndex).isInt32());
// Returns whether nodeIndex is statically known NOT to be a number: its
// generation info is neither JS-double nor JS-integer, or it is a constant
// that is not a numeric constant.
409 bool JITCodeGenerator::isKnownNotNumber(NodeIndex nodeIndex)
411 Node& node = m_jit.graph()[nodeIndex];
412 VirtualRegister virtualRegister = node.virtualRegister();
413 GenerationInfo& info = m_generationInfo[virtualRegister];
415 return (!info.isJSDouble() && !info.isJSInteger())
416 || (node.isConstant() && !isNumberConstant(nodeIndex));
// Returns whether nodeIndex is statically known to be a boolean: the node's
// result type is boolean, it is a boolean constant, or its generation info
// says JS-boolean.
419 bool JITCodeGenerator::isKnownBoolean(NodeIndex nodeIndex)
421 Node& node = m_jit.graph()[nodeIndex];
422 if (node.hasBooleanResult())
425 if (isBooleanConstant(nodeIndex))
428 VirtualRegister virtualRegister = node.virtualRegister();
429 GenerationInfo& info = m_generationInfo[virtualRegister];
431 return info.isJSBoolean();
// Narrowing cast that asserts (in debug builds) the conversion is lossless,
// i.e. the result compares equal to the original value.
434 template<typename To, typename From>
435 inline To safeCast(From value)
437 To result = static_cast<To>(value);
438 ASSERT(result == value);
// Emits an inline property-access cache for get_by_id: a patchable structure
// check on the base object, a patchable compact load from property storage
// (the fast path), and a slow path that calls out to the appropriate
// optimize operation. The offsets between the emitted labels are recorded
// via addPropertyAccess so the cache can be repatched later. Returns the
// slow-path call. NOTE(review): lines are elided between the visible ones
// here (e.g. the nodeType switch structure around the two appendCall sites).
442 JITCompiler::Call JITCodeGenerator::cachedGetById(GPRReg baseGPR, GPRReg resultGPR, GPRReg scratchGPR, unsigned identifierNumber, JITCompiler::Jump slowPathTarget, NodeType nodeType)
444 JITCompiler::DataLabelPtr structureToCompare;
// Patchable structure check; initially compares against a bogus pointer (-1)
// so the fast path cannot succeed until the cache is populated.
445 JITCompiler::Jump structureCheck = m_jit.branchPtrWithPatch(JITCompiler::NotEqual, JITCompiler::Address(baseGPR, JSCell::structureOffset()), structureToCompare, JITCompiler::TrustedImmPtr(reinterpret_cast<void*>(-1)));
447 m_jit.loadPtr(JITCompiler::Address(baseGPR, JSObject::offsetOfPropertyStorage()), resultGPR);
448 JITCompiler::DataLabelCompact loadWithPatch = m_jit.loadPtrWithCompactAddressOffsetPatch(JITCompiler::Address(resultGPR, 0), resultGPR);
450 JITCompiler::Jump done = m_jit.jump();
452 structureCheck.link(&m_jit);
454 if (slowPathTarget.isSet())
455 slowPathTarget.link(&m_jit);
457 JITCompiler::Label slowCase = m_jit.label();
// Slow path: spill live registers, set up (callFrame, base, identifier)
// arguments, and call the optimizing get-by-id / get-method operation.
459 silentSpillAllRegisters(resultGPR);
460 m_jit.move(baseGPR, GPRInfo::argumentGPR1);
461 m_jit.move(JITCompiler::ImmPtr(identifier(identifierNumber)), GPRInfo::argumentGPR2);
462 m_jit.move(GPRInfo::callFrameRegister, GPRInfo::argumentGPR0);
463 JITCompiler::Call functionCall;
466 functionCall = appendCallWithExceptionCheck(operationGetByIdOptimize);
470 functionCall = appendCallWithExceptionCheck(operationGetMethodOptimize);
474 ASSERT_NOT_REACHED();
475 return JITCompiler::Call();
477 m_jit.move(GPRInfo::returnValueGPR, resultGPR);
478 silentFillAllRegisters(resultGPR);
482 JITCompiler::Label doneLabel = m_jit.label();
// Record label offsets relative to the call so the stub can be repatched.
484 int16_t checkImmToCall = safeCast<int16_t>(m_jit.differenceBetween(structureToCompare, functionCall));
485 int16_t callToCheck = safeCast<int16_t>(m_jit.differenceBetween(functionCall, structureCheck));
486 int16_t callToLoad = safeCast<int16_t>(m_jit.differenceBetween(functionCall, loadWithPatch));
487 int16_t callToSlowCase = safeCast<int16_t>(m_jit.differenceBetween(functionCall, slowCase));
488 int16_t callToDone = safeCast<int16_t>(m_jit.differenceBetween(functionCall, doneLabel));
490 m_jit.addPropertyAccess(functionCall, checkImmToCall, callToCheck, callToLoad, callToSlowCase, callToDone, safeCast<int8_t>(baseGPR), safeCast<int8_t>(resultGPR), safeCast<int8_t>(scratchGPR));
492 if (scratchGPR != resultGPR && scratchGPR != InvalidGPRReg)
// Write-barrier hook. In the code visible here no actual barrier is emitted:
// when WRITE_BARRIER_PROFILING is enabled it only bumps the per-use-kind
// counter; otherwise the parameters are simply marked unused.
498 void JITCodeGenerator::writeBarrier(MacroAssembler& jit, GPRReg owner, GPRReg scratch, WriteBarrierUseKind useKind)
502 UNUSED_PARAM(scratch);
503 UNUSED_PARAM(useKind);
504 ASSERT(owner != scratch);
506 #if ENABLE(WRITE_BARRIER_PROFILING)
507 JITCompiler::emitCount(jit, WriteBarrierCounters::jitCounterFor(useKind));
// Emits an inline property-access cache for put_by_id: a patchable structure
// check, write barrier, and patchable store into property storage (fast
// path), plus a slow path that calls the put-by-id optimize operation chosen
// by strict-mode and direct/indirect put kind. Offsets are recorded via
// addPropertyAccess for later repatching.
511 void JITCodeGenerator::cachedPutById(GPRReg baseGPR, GPRReg valueGPR, GPRReg scratchGPR, unsigned identifierNumber, PutKind putKind, JITCompiler::Jump slowPathTarget)
513 JITCompiler::DataLabelPtr structureToCompare;
// Patchable structure check; initially compares against -1 so the fast path
// cannot succeed until the cache is populated.
514 JITCompiler::Jump structureCheck = m_jit.branchPtrWithPatch(JITCompiler::NotEqual, JITCompiler::Address(baseGPR, JSCell::structureOffset()), structureToCompare, JITCompiler::TrustedImmPtr(reinterpret_cast<void*>(-1)));
516 writeBarrier(m_jit, baseGPR, scratchGPR, WriteBarrierForPropertyAccess);
518 m_jit.loadPtr(JITCompiler::Address(baseGPR, JSObject::offsetOfPropertyStorage()), scratchGPR);
519 JITCompiler::DataLabel32 storeWithPatch = m_jit.storePtrWithAddressOffsetPatch(valueGPR, JITCompiler::Address(scratchGPR, 0));
521 JITCompiler::Jump done = m_jit.jump();
523 structureCheck.link(&m_jit);
525 if (slowPathTarget.isSet())
526 slowPathTarget.link(&m_jit);
528 JITCompiler::Label slowCase = m_jit.label();
// Slow path: spill, marshal (callFrame, value, base, identifier) and call
// the variant matching strict mode and put kind.
530 silentSpillAllRegisters(InvalidGPRReg);
531 setupTwoStubArgs<GPRInfo::argumentGPR1, GPRInfo::argumentGPR2>(valueGPR, baseGPR);
532 m_jit.move(JITCompiler::ImmPtr(identifier(identifierNumber)), GPRInfo::argumentGPR3);
533 m_jit.move(GPRInfo::callFrameRegister, GPRInfo::argumentGPR0);
534 V_DFGOperation_EJJI optimizedCall;
535 if (m_jit.codeBlock()->isStrictMode()) {
536 if (putKind == Direct)
537 optimizedCall = operationPutByIdDirectStrictOptimize;
539 optimizedCall = operationPutByIdStrictOptimize;
541 if (putKind == Direct)
542 optimizedCall = operationPutByIdDirectNonStrictOptimize;
544 optimizedCall = operationPutByIdNonStrictOptimize;
546 JITCompiler::Call functionCall = appendCallWithExceptionCheck(optimizedCall);
547 silentFillAllRegisters(InvalidGPRReg);
550 JITCompiler::Label doneLabel = m_jit.label();
// Record label offsets relative to the call for repatching.
552 int16_t checkImmToCall = safeCast<int16_t>(m_jit.differenceBetween(structureToCompare, functionCall));
553 int16_t callToCheck = safeCast<int16_t>(m_jit.differenceBetween(functionCall, structureCheck));
554 int16_t callToStore = safeCast<int16_t>(m_jit.differenceBetween(functionCall, storeWithPatch));
555 int16_t callToSlowCase = safeCast<int16_t>(m_jit.differenceBetween(functionCall, slowCase));
556 int16_t callToDone = safeCast<int16_t>(m_jit.differenceBetween(functionCall, doneLabel));
558 m_jit.addPropertyAccess(functionCall, checkImmToCall, callToCheck, callToStore, callToSlowCase, callToDone, safeCast<int8_t>(baseGPR), safeCast<int8_t>(valueGPR), safeCast<int8_t>(scratchGPR));
// Emits a method-access cache: patchable checks of the base object's
// structure and the prototype object's structure, with a patchable move of
// the cached function into resultGPR on the fast path. On either structure
// mismatch it falls back to a cachedGetById stub in GetMethod mode, and the
// patch locations are registered via addMethodGet.
561 void JITCodeGenerator::cachedGetMethod(GPRReg baseGPR, GPRReg resultGPR, GPRReg scratchGPR, unsigned identifierNumber, JITCompiler::Jump slowPathTarget)
563 JITCompiler::Call slowCall;
564 JITCompiler::DataLabelPtr structToCompare, protoObj, protoStructToCompare, putFunction;
566 JITCompiler::Jump wrongStructure = m_jit.branchPtrWithPatch(JITCompiler::NotEqual, JITCompiler::Address(baseGPR, JSCell::structureOffset()), structToCompare, JITCompiler::TrustedImmPtr(reinterpret_cast<void*>(-1)));
// Patchable prototype pointer (initially 0) and its structure check.
567 protoObj = m_jit.moveWithPatch(JITCompiler::TrustedImmPtr(0), resultGPR);
568 JITCompiler::Jump wrongProtoStructure = m_jit.branchPtrWithPatch(JITCompiler::NotEqual, JITCompiler::Address(resultGPR, JSCell::structureOffset()), protoStructToCompare, JITCompiler::TrustedImmPtr(reinterpret_cast<void*>(-1)));
// Patchable cached-function load.
570 putFunction = m_jit.moveWithPatch(JITCompiler::TrustedImmPtr(0), resultGPR);
572 JITCompiler::Jump done = m_jit.jump();
574 wrongStructure.link(&m_jit);
575 wrongProtoStructure.link(&m_jit);
577 slowCall = cachedGetById(baseGPR, resultGPR, scratchGPR, identifierNumber, slowPathTarget, GetMethod);
581 m_jit.addMethodGet(slowCall, structToCompare, protoObj, protoStructToCompare, putFunction);
// Produces a boolean result for (operand == null/undefined), optionally
// inverted. Cells answer true only if they are MasqueradesAsUndefined;
// immediates are masked with ~TagBitUndefined so null and undefined compare
// equal to ValueNull. The result is tagged with ValueFalse to form a boxed
// JS boolean.
584 void JITCodeGenerator::nonSpeculativeNonPeepholeCompareNull(NodeIndex operand, bool invert)
586 JSValueOperand arg(this, operand);
587 GPRReg argGPR = arg.gpr();
589 GPRTemporary result(this, arg);
590 GPRReg resultGPR = result.gpr();
592 JITCompiler::Jump notCell;
// Skip the cell check entirely if the operand is statically known to be a cell.
594 if (!isKnownCell(operand))
595 notCell = m_jit.branchTestPtr(MacroAssembler::NonZero, argGPR, GPRInfo::tagMaskRegister);
// Cell path: test the MasqueradesAsUndefined bit in the structure's type info.
597 m_jit.loadPtr(JITCompiler::Address(argGPR, JSCell::structureOffset()), resultGPR);
598 m_jit.test8(invert ? JITCompiler::Zero : JITCompiler::NonZero, JITCompiler::Address(resultGPR, Structure::typeInfoFlagsOffset()), JITCompiler::TrustedImm32(MasqueradesAsUndefined), resultGPR);
600 if (!isKnownCell(operand)) {
601 JITCompiler::Jump done = m_jit.jump();
603 notCell.link(&m_jit);
// Immediate path: mask off the undefined tag bit and compare against null.
605 m_jit.move(argGPR, resultGPR);
606 m_jit.andPtr(JITCompiler::TrustedImm32(~TagBitUndefined), resultGPR);
607 m_jit.comparePtr(invert ? JITCompiler::NotEqual : JITCompiler::Equal, resultGPR, JITCompiler::TrustedImm32(ValueNull), resultGPR);
// Box the 0/1 result as a JS boolean.
612 m_jit.or32(TrustedImm32(ValueFalse), resultGPR);
613 jsValueResult(resultGPR, m_compileIndex, DataFormatJSBoolean);
// Fused compare-null + branch: same semantics as the non-peephole variant
// above, but instead of materializing a boolean it branches directly to the
// taken/notTaken blocks of the following Branch node. If the taken block is
// the fall-through block, taken/notTaken are swapped (the swap statements
// themselves are elided from this extraction).
616 void JITCodeGenerator::nonSpeculativePeepholeBranchNull(NodeIndex operand, NodeIndex branchNodeIndex, bool invert)
618 Node& branchNode = m_jit.graph()[branchNodeIndex];
619 BlockIndex taken = m_jit.graph().blockIndexForBytecodeOffset(branchNode.takenBytecodeOffset());
620 BlockIndex notTaken = m_jit.graph().blockIndexForBytecodeOffset(branchNode.notTakenBytecodeOffset());
622 if (taken == (m_block + 1)) {
624 BlockIndex tmp = taken;
629 JSValueOperand arg(this, operand);
630 GPRReg argGPR = arg.gpr();
632 GPRTemporary result(this, arg);
633 GPRReg resultGPR = result.gpr();
635 JITCompiler::Jump notCell;
637 if (!isKnownCell(operand))
638 notCell = m_jit.branchTestPtr(MacroAssembler::NonZero, argGPR, GPRInfo::tagMaskRegister);
// Cell path: branch on the MasqueradesAsUndefined type-info bit.
640 m_jit.loadPtr(JITCompiler::Address(argGPR, JSCell::structureOffset()), resultGPR);
641 addBranch(m_jit.branchTest8(invert ? JITCompiler::Zero : JITCompiler::NonZero, JITCompiler::Address(resultGPR, Structure::typeInfoFlagsOffset()), JITCompiler::TrustedImm32(MasqueradesAsUndefined)), taken);
643 if (!isKnownCell(operand)) {
644 addBranch(m_jit.jump(), notTaken);
646 notCell.link(&m_jit);
// Immediate path: mask the undefined bit and branch if equal to null.
648 m_jit.move(argGPR, resultGPR);
649 m_jit.andPtr(JITCompiler::TrustedImm32(~TagBitUndefined), resultGPR);
650 addBranch(m_jit.branchPtr(invert ? JITCompiler::NotEqual : JITCompiler::Equal, resultGPR, JITCompiler::TrustedImmPtr(reinterpret_cast<void*>(ValueNull))), taken);
// Fall through when notTaken is the next block; otherwise jump to it.
653 if (notTaken != (m_block + 1))
654 addBranch(m_jit.jump(), notTaken);
// Dispatcher for compare-null: if the next node is a Branch consuming this
// compare (detectPeepHoleBranch), emit the fused branch form and advance
// m_compileIndex past the branch; otherwise emit the boolean-producing form.
// The elided lines presumably include the return values — TODO confirm.
657 bool JITCodeGenerator::nonSpeculativeCompareNull(Node& node, NodeIndex operand, bool invert)
659 NodeIndex branchNodeIndex = detectPeepHoleBranch();
660 if (branchNodeIndex != NoNode) {
661 ASSERT(node.adjustedRefCount() == 1);
663 nonSpeculativePeepholeBranchNull(operand, branchNodeIndex, invert);
667 m_compileIndex = branchNodeIndex;
672 nonSpeculativeNonPeepholeCompareNull(operand, invert);
// Fused relational-compare + branch. If either operand is known not to be an
// integer, it calls the helper unconditionally; otherwise it emits an int32
// fast-path compare with a slow path (taken when either operand fails the
// tagTypeNumber check) that spills, calls the helper, and branches on the
// call's boolean result.
677 void JITCodeGenerator::nonSpeculativePeepholeBranch(Node& node, NodeIndex branchNodeIndex, MacroAssembler::RelationalCondition cond, Z_DFGOperation_EJJ helperFunction)
679 Node& branchNode = m_jit.graph()[branchNodeIndex];
680 BlockIndex taken = m_jit.graph().blockIndexForBytecodeOffset(branchNode.takenBytecodeOffset());
681 BlockIndex notTaken = m_jit.graph().blockIndexForBytecodeOffset(branchNode.notTakenBytecodeOffset());
683 JITCompiler::ResultCondition callResultCondition = JITCompiler::NonZero;
685 // The branch instruction will branch to the taken block.
686 // If taken is next, switch taken with notTaken & invert the branch condition so we can fall through.
687 if (taken == (m_block + 1)) {
688 cond = JITCompiler::invert(cond);
689 callResultCondition = JITCompiler::Zero;
690 BlockIndex tmp = taken;
695 JSValueOperand arg1(this, node.child1());
696 JSValueOperand arg2(this, node.child2());
697 GPRReg arg1GPR = arg1.gpr();
698 GPRReg arg2GPR = arg2.gpr();
700 JITCompiler::JumpList slowPath;
// Known non-integer operand: no fast path, call the helper directly.
702 if (isKnownNotInteger(node.child1()) || isKnownNotInteger(node.child2())) {
703 GPRResult result(this);
704 GPRReg resultGPR = result.gpr();
711 callOperation(helperFunction, resultGPR, arg1GPR, arg2GPR);
712 addBranch(m_jit.branchTest8(callResultCondition, resultGPR), taken);
714 GPRTemporary result(this, arg2);
715 GPRReg resultGPR = result.gpr();
// Guard the int32 fast path: any operand below tagTypeNumber is not a boxed
// int32 and goes to the slow path.
720 if (!isKnownInteger(node.child1()))
721 slowPath.append(m_jit.branchPtr(MacroAssembler::Below, arg1GPR, GPRInfo::tagTypeNumberRegister));
722 if (!isKnownInteger(node.child2()))
723 slowPath.append(m_jit.branchPtr(MacroAssembler::Below, arg2GPR, GPRInfo::tagTypeNumberRegister));
725 addBranch(m_jit.branch32(cond, arg1GPR, arg2GPR), taken);
727 if (!isKnownInteger(node.child1()) || !isKnownInteger(node.child2())) {
728 addBranch(m_jit.jump(), notTaken);
730 slowPath.link(&m_jit);
// Slow path: spill, call the helper with (callFrame, arg1, arg2), branch on
// its result.
732 silentSpillAllRegisters(resultGPR);
733 setupStubArguments(arg1GPR, arg2GPR);
734 m_jit.move(GPRInfo::callFrameRegister, GPRInfo::argumentGPR0);
735 appendCallWithExceptionCheck(helperFunction);
736 m_jit.move(GPRInfo::returnValueGPR, resultGPR);
737 silentFillAllRegisters(resultGPR);
739 addBranch(m_jit.branchTest8(callResultCondition, resultGPR), taken);
743 if (notTaken != (m_block + 1))
744 addBranch(m_jit.jump(), notTaken);
// Boolean-producing relational compare (non-fused form). Known non-integer
// operands go straight to the helper call; otherwise an int32 fast-path
// compare32 is emitted, with a slow path that calls the helper and masks its
// result to a single bit. Either way the 0/1 result is tagged with
// ValueFalse to form a boxed JS boolean.
747 void JITCodeGenerator::nonSpeculativeNonPeepholeCompare(Node& node, MacroAssembler::RelationalCondition cond, Z_DFGOperation_EJJ helperFunction)
749 JSValueOperand arg1(this, node.child1());
750 JSValueOperand arg2(this, node.child2());
751 GPRReg arg1GPR = arg1.gpr();
752 GPRReg arg2GPR = arg2.gpr();
754 JITCompiler::JumpList slowPath;
// Known non-integer operand: no fast path, call the helper directly.
756 if (isKnownNotInteger(node.child1()) || isKnownNotInteger(node.child2())) {
757 GPRResult result(this);
758 GPRReg resultGPR = result.gpr();
765 callOperation(helperFunction, resultGPR, arg1GPR, arg2GPR);
767 m_jit.or32(TrustedImm32(ValueFalse), resultGPR);
768 jsValueResult(resultGPR, m_compileIndex, DataFormatJSBoolean, UseChildrenCalledExplicitly);
770 GPRTemporary result(this, arg2);
771 GPRReg resultGPR = result.gpr();
// Guard the int32 fast path via the tagTypeNumber check.
776 if (!isKnownInteger(node.child1()))
777 slowPath.append(m_jit.branchPtr(MacroAssembler::Below, arg1GPR, GPRInfo::tagTypeNumberRegister));
778 if (!isKnownInteger(node.child2()))
779 slowPath.append(m_jit.branchPtr(MacroAssembler::Below, arg2GPR, GPRInfo::tagTypeNumberRegister));
781 m_jit.compare32(cond, arg1GPR, arg2GPR, resultGPR);
783 if (!isKnownInteger(node.child1()) || !isKnownInteger(node.child2())) {
784 JITCompiler::Jump haveResult = m_jit.jump();
786 slowPath.link(&m_jit);
// Slow path: spill, call the helper, then keep only the low bit of the
// returned boolean.
788 silentSpillAllRegisters(resultGPR);
789 setupStubArguments(arg1GPR, arg2GPR);
790 m_jit.move(GPRInfo::callFrameRegister, GPRInfo::argumentGPR0);
791 appendCallWithExceptionCheck(helperFunction);
792 m_jit.move(GPRInfo::returnValueGPR, resultGPR);
793 silentFillAllRegisters(resultGPR);
795 m_jit.andPtr(TrustedImm32(1), resultGPR);
797 haveResult.link(&m_jit);
// Box the 0/1 result as a JS boolean.
800 m_jit.or32(TrustedImm32(ValueFalse), resultGPR);
802 jsValueResult(resultGPR, m_compileIndex, DataFormatJSBoolean, UseChildrenCalledExplicitly);
// Dispatcher for relational compares: emit the fused branch form when a
// peephole Branch follows (advancing m_compileIndex past it), otherwise the
// boolean-producing form. The elided lines presumably include the return
// values — TODO confirm.
806 bool JITCodeGenerator::nonSpeculativeCompare(Node& node, MacroAssembler::RelationalCondition cond, Z_DFGOperation_EJJ helperFunction)
808 NodeIndex branchNodeIndex = detectPeepHoleBranch();
809 if (branchNodeIndex != NoNode) {
810 ASSERT(node.adjustedRefCount() == 1);
812 nonSpeculativePeepholeBranch(node, branchNodeIndex, cond, helperFunction);
814 m_compileIndex = branchNodeIndex;
819 nonSpeculativeNonPeepholeCompare(node, cond, helperFunction);
// Fused strict-equality + branch. If both operands are known cells, pointer
// equality is the fast path and operationCompareStrictEqCell the slow path.
// Otherwise it distinguishes three cases via the combined tag bits of the
// two operands: both-cells (pointer compare), a number involved (full
// operationCompareStrictEq call), or neither (32-bit immediate compare).
824 void JITCodeGenerator::nonSpeculativePeepholeStrictEq(Node& node, NodeIndex branchNodeIndex, bool invert)
826 Node& branchNode = m_jit.graph()[branchNodeIndex];
827 BlockIndex taken = m_jit.graph().blockIndexForBytecodeOffset(branchNode.takenBytecodeOffset());
828 BlockIndex notTaken = m_jit.graph().blockIndexForBytecodeOffset(branchNode.notTakenBytecodeOffset());
830 // The branch instruction will branch to the taken block.
831 // If taken is next, switch taken with notTaken & invert the branch condition so we can fall through.
832 if (taken == (m_block + 1)) {
834 BlockIndex tmp = taken;
839 JSValueOperand arg1(this, node.child1());
840 JSValueOperand arg2(this, node.child2());
841 GPRReg arg1GPR = arg1.gpr();
842 GPRReg arg2GPR = arg2.gpr();
844 GPRTemporary result(this);
845 GPRReg resultGPR = result.gpr();
850 if (isKnownCell(node.child1()) && isKnownCell(node.child2())) {
851 // see if we get lucky: if the arguments are cells and they reference the same
852 // cell, then they must be strictly equal.
853 addBranch(m_jit.branchPtr(JITCompiler::Equal, arg1GPR, arg2GPR), invert ? notTaken : taken);
// Unequal cell pointers: fall back to the cell strict-eq operation.
855 silentSpillAllRegisters(resultGPR);
856 setupStubArguments(arg1GPR, arg2GPR);
857 m_jit.move(GPRInfo::callFrameRegister, GPRInfo::argumentGPR0);
858 appendCallWithExceptionCheck(operationCompareStrictEqCell);
859 m_jit.move(GPRInfo::returnValueGPR, resultGPR);
860 silentFillAllRegisters(resultGPR);
862 addBranch(m_jit.branchTest8(invert ? JITCompiler::NonZero : JITCompiler::Zero, resultGPR), taken);
// General case: OR the operands' tag bits together to classify the pair.
864 m_jit.orPtr(arg1GPR, arg2GPR, resultGPR);
866 JITCompiler::Jump twoCellsCase = m_jit.branchTestPtr(JITCompiler::Zero, resultGPR, GPRInfo::tagMaskRegister);
868 JITCompiler::Jump numberCase = m_jit.branchTestPtr(JITCompiler::NonZero, resultGPR, GPRInfo::tagTypeNumberRegister);
// Neither cells nor numbers: compare the immediates directly.
870 addBranch(m_jit.branch32(invert ? JITCompiler::NotEqual : JITCompiler::Equal, arg1GPR, arg2GPR), taken);
871 addBranch(m_jit.jump(), notTaken);
873 twoCellsCase.link(&m_jit);
874 addBranch(m_jit.branchPtr(JITCompiler::Equal, arg1GPR, arg2GPR), invert ? notTaken : taken);
// Numbers (or unequal cells) need the full strict-eq operation.
876 numberCase.link(&m_jit);
878 silentSpillAllRegisters(resultGPR);
879 setupStubArguments(arg1GPR, arg2GPR);
880 m_jit.move(GPRInfo::callFrameRegister, GPRInfo::argumentGPR0);
881 appendCallWithExceptionCheck(operationCompareStrictEq);
882 m_jit.move(GPRInfo::returnValueGPR, resultGPR);
883 silentFillAllRegisters(resultGPR);
885 addBranch(m_jit.branchTest8(invert ? JITCompiler::Zero : JITCompiler::NonZero, resultGPR), taken);
888 if (notTaken != (m_block + 1))
889 addBranch(m_jit.jump(), notTaken);
// Boolean-producing strict equality (non-fused form). Mirrors the peephole
// variant's case analysis but materializes a boxed JS boolean in resultGPR
// instead of branching: equal cell pointers yield jsBoolean(!invert)
// immediately, otherwise the appropriate strict-eq operation is called and
// its result masked to one bit and tagged with ValueFalse.
892 void JITCodeGenerator::nonSpeculativeNonPeepholeStrictEq(Node& node, bool invert)
894 JSValueOperand arg1(this, node.child1());
895 JSValueOperand arg2(this, node.child2());
896 GPRReg arg1GPR = arg1.gpr();
897 GPRReg arg2GPR = arg2.gpr();
899 GPRTemporary result(this);
900 GPRReg resultGPR = result.gpr();
905 if (isKnownCell(node.child1()) && isKnownCell(node.child2())) {
906 // see if we get lucky: if the arguments are cells and they reference the same
907 // cell, then they must be strictly equal.
908 JITCompiler::Jump notEqualCase = m_jit.branchPtr(JITCompiler::NotEqual, arg1GPR, arg2GPR);
// Same cell: the answer is a constant boolean.
910 m_jit.move(JITCompiler::TrustedImmPtr(JSValue::encode(jsBoolean(!invert))), resultGPR);
912 JITCompiler::Jump done = m_jit.jump();
914 notEqualCase.link(&m_jit);
// Unequal cell pointers: call the cell strict-eq operation and box the bit.
916 silentSpillAllRegisters(resultGPR);
917 setupStubArguments(arg1GPR, arg2GPR);
918 m_jit.move(GPRInfo::callFrameRegister, GPRInfo::argumentGPR0);
919 appendCallWithExceptionCheck(operationCompareStrictEqCell);
920 m_jit.move(GPRInfo::returnValueGPR, resultGPR);
921 silentFillAllRegisters(resultGPR);
923 m_jit.andPtr(JITCompiler::TrustedImm32(1), resultGPR);
924 m_jit.or32(JITCompiler::TrustedImm32(ValueFalse), resultGPR);
// General case: classify the operand pair by OR-ing their tag bits.
928 m_jit.orPtr(arg1GPR, arg2GPR, resultGPR);
930 JITCompiler::Jump twoCellsCase = m_jit.branchTestPtr(JITCompiler::Zero, resultGPR, GPRInfo::tagMaskRegister);
932 JITCompiler::Jump numberCase = m_jit.branchTestPtr(JITCompiler::NonZero, resultGPR, GPRInfo::tagTypeNumberRegister);
// Neither cells nor numbers: compare the immediates directly.
934 m_jit.compare32(invert ? JITCompiler::NotEqual : JITCompiler::Equal, arg1GPR, arg2GPR, resultGPR);
936 JITCompiler::Jump done1 = m_jit.jump();
938 twoCellsCase.link(&m_jit);
939 JITCompiler::Jump notEqualCase = m_jit.branchPtr(JITCompiler::NotEqual, arg1GPR, arg2GPR);
941 m_jit.move(JITCompiler::TrustedImmPtr(JSValue::encode(jsBoolean(!invert))), resultGPR);
943 JITCompiler::Jump done2 = m_jit.jump();
// Numbers (or unequal cells) need the full strict-eq operation.
945 numberCase.link(&m_jit);
946 notEqualCase.link(&m_jit);
948 silentSpillAllRegisters(resultGPR);
949 setupStubArguments(arg1GPR, arg2GPR);
950 m_jit.move(GPRInfo::callFrameRegister, GPRInfo::argumentGPR0);
951 appendCallWithExceptionCheck(operationCompareStrictEq);
952 m_jit.move(GPRInfo::returnValueGPR, resultGPR);
953 silentFillAllRegisters(resultGPR);
955 m_jit.andPtr(JITCompiler::TrustedImm32(1), resultGPR);
959 m_jit.or32(JITCompiler::TrustedImm32(ValueFalse), resultGPR);
964 jsValueResult(resultGPR, m_compileIndex, DataFormatJSBoolean, UseChildrenCalledExplicitly);
// Dispatcher for strict equality. When not inverted and an operand is known
// numeric, strict-eq degenerates to an ordinary Equal compare using
// operationCompareStrictEq. Otherwise it chooses between the fused-branch
// and boolean-producing strict-eq forms, as in nonSpeculativeCompare. The
// elided lines presumably include the return values — TODO confirm.
967 bool JITCodeGenerator::nonSpeculativeStrictEq(Node& node, bool invert)
969 if (!invert && (isKnownNumeric(node.child1()) || isKnownNumeric(node.child2())))
970 return nonSpeculativeCompare(node, MacroAssembler::Equal, operationCompareStrictEq);
972 NodeIndex branchNodeIndex = detectPeepHoleBranch();
973 if (branchNodeIndex != NoNode) {
974 ASSERT(node.adjustedRefCount() == 1);
976 nonSpeculativePeepholeStrictEq(node, branchNodeIndex, invert);
978 m_compileIndex = branchNodeIndex;
983 nonSpeculativeNonPeepholeStrictEq(node, invert);
// Emits code for a Branch node. Known booleans get a single branchTest32.
// Otherwise fast-path checks handle literal true/false, zero, and
// non-boolean numbers (ordered by whether the value is predicted boolean),
// with a final slow path that calls dfgConvertJSValueToBoolean; in
// speculative mode a boolean prediction failure triggers a speculation
// check instead of the call.
988 void JITCodeGenerator::emitBranch(Node& node)
990 JSValueOperand value(this, node.child1());
991 GPRReg valueGPR = value.gpr();
993 BlockIndex taken = m_jit.graph().blockIndexForBytecodeOffset(node.takenBytecodeOffset());
994 BlockIndex notTaken = m_jit.graph().blockIndexForBytecodeOffset(node.notTakenBytecodeOffset());
996 if (isKnownBoolean(node.child1())) {
997 MacroAssembler::ResultCondition condition = MacroAssembler::NonZero;
// If taken is the fall-through block, invert the test and swap targets
// (the swap statements are elided from this extraction).
999 if (taken == (m_block + 1)) {
1000 condition = MacroAssembler::Zero;
1001 BlockIndex tmp = taken;
1006 addBranch(m_jit.branchTest32(condition, valueGPR, TrustedImm32(true)), taken);
1007 if (notTaken != (m_block + 1))
1008 addBranch(m_jit.jump(), notTaken);
1010 noResult(m_compileIndex);
1012 GPRTemporary result(this);
1013 GPRReg resultGPR = result.gpr();
1015 bool predictBoolean = isBooleanPrediction(m_jit.graph().getPrediction(m_jit.graph()[node.child1()]));
// Predicted boolean: test literal false/true first.
1017 if (predictBoolean) {
1018 addBranch(m_jit.branchPtr(MacroAssembler::Equal, valueGPR, MacroAssembler::ImmPtr(JSValue::encode(jsBoolean(false)))), notTaken);
1019 addBranch(m_jit.branchPtr(MacroAssembler::Equal, valueGPR, MacroAssembler::ImmPtr(JSValue::encode(jsBoolean(true)))), taken);
// Speculative mode treats a non-boolean here as a speculation failure.
1022 if (m_isSpeculative && predictBoolean) {
1023 speculationCheck(m_jit.jump());
// Numeric fast path: zero is falsy; any other boxed number is truthy.
1026 addBranch(m_jit.branchPtr(MacroAssembler::Equal, valueGPR, MacroAssembler::ImmPtr(JSValue::encode(jsNumber(0)))), notTaken);
1027 addBranch(m_jit.branchPtr(MacroAssembler::AboveOrEqual, valueGPR, GPRInfo::tagTypeNumberRegister), taken);
1029 if (!predictBoolean) {
1030 addBranch(m_jit.branchPtr(MacroAssembler::Equal, valueGPR, MacroAssembler::ImmPtr(JSValue::encode(jsBoolean(false)))), notTaken);
1031 addBranch(m_jit.branchPtr(MacroAssembler::Equal, valueGPR, MacroAssembler::ImmPtr(JSValue::encode(jsBoolean(true)))), taken);
// Slow path: call out to convert the value to a boolean, then branch.
1036 silentSpillAllRegisters(resultGPR);
1037 m_jit.move(valueGPR, GPRInfo::argumentGPR1);
1038 m_jit.move(GPRInfo::callFrameRegister, GPRInfo::argumentGPR0);
1039 appendCallWithExceptionCheck(dfgConvertJSValueToBoolean);
1040 m_jit.move(GPRInfo::returnValueGPR, resultGPR);
1041 silentFillAllRegisters(resultGPR);
1043 addBranch(m_jit.branchTest8(MacroAssembler::NonZero, resultGPR), taken);
1044 if (notTaken != (m_block + 1))
1045 addBranch(m_jit.jump(), notTaken);
1048 noResult(m_compileIndex, UseChildrenCalledExplicitly);
// Compiles LogicalNot (!x) on the non-speculative path. Fast case: if the
// operand is already a boolean, flipping the low bit of its encoding turns
// true into false and vice versa; otherwise call out to convert to boolean
// first. Result is produced as a DataFormatJSBoolean.
1052 void JITCodeGenerator::nonSpeculativeLogicalNot(Node& node)
1054 JSValueOperand arg1(this, node.child1());
1055 GPRTemporary result(this);
1057 GPRReg arg1GPR = arg1.gpr();
1058 GPRReg resultGPR = result.gpr();
// XOR with ValueFalse maps encoded false -> 0 and encoded true -> 1;
// anything else leaves bits outside the low bit set.
1062 m_jit.move(arg1GPR, resultGPR);
1063 m_jit.xorPtr(TrustedImm32(static_cast<int32_t>(ValueFalse)), resultGPR);
// If only the low bit (at most) survives, the operand was a boolean.
1064 JITCompiler::Jump fastCase = m_jit.branchTestPtr(JITCompiler::Zero, resultGPR, TrustedImm32(static_cast<int32_t>(~1)));
// Slow path: dfgConvertJSValueToBoolean(exec, value), registers preserved.
1066 silentSpillAllRegisters(resultGPR);
1067 m_jit.move(arg1GPR, GPRInfo::argumentGPR1);
1068 m_jit.move(GPRInfo::callFrameRegister, GPRInfo::argumentGPR0);
1069 appendCallWithExceptionCheck(dfgConvertJSValueToBoolean);
1070 m_jit.move(GPRInfo::returnValueGPR, resultGPR);
1071 silentFillAllRegisters(resultGPR);
1073 fastCase.link(&m_jit);
// XOR with ValueTrue both inverts the low bit (the logical not) and
// re-applies the boolean tag, yielding an encoded JSValue boolean.
1075 m_jit.xorPtr(TrustedImm32(static_cast<int32_t>(ValueTrue)), resultGPR);
1076 jsValueResult(resultGPR, m_compileIndex, DataFormatJSBoolean, UseChildrenCalledExplicitly);
// Emits code for a Call or Construct node: lays out the outgoing arguments
// and call-frame header above the current frame, performs a patchable
// inline-cached call to the callee, and falls back to a link stub
// (operationLinkCall / operationLinkConstruct) on inline-cache miss.
// NOTE(review): this extract is missing lines (numbering gaps, e.g.
// 1154->1158); in particular the declaration of `isCall` used at 1162 is
// not visible here — presumably derived from node.op; confirm in the
// original file.
1079 void JITCodeGenerator::emitCall(Node& node)
// Stub to call when the inline cache misses, selected by opcode.
1081 P_DFGOperation_E slowCallFunction;
1084 if (node.op == Call) {
1085 slowCallFunction = operationLinkCall;
1088 ASSERT(node.op == Construct);
1089 slowCallFunction = operationLinkConstruct;
// The callee is the first var-arg child; it is used (released) explicitly.
1093 NodeIndex calleeNodeIndex = m_jit.graph().m_varArgChildren[node.firstChild()];
1094 JSValueOperand callee(this, calleeNodeIndex);
1095 GPRReg calleeGPR = callee.gpr();
1096 use(calleeNodeIndex);
1098 // the call instruction's first child is either the function (normal call) or the
1099 // receiver (method call). subsequent children are the arguments.
1100 int numArgs = node.numChildren() - 1;
1102 // amount of stuff (in units of sizeof(Register)) that we need to place at the
1103 // top of the JS stack.
1104 int callDataSize = 0;
1106 // first there are the arguments
1107 callDataSize += numArgs;
1109 // and then there is the call frame header
1110 callDataSize += RegisterFile::CallFrameHeaderSize;
// Populate the new frame's header: argument count and a link back to the
// caller's frame.
1112 m_jit.storePtr(MacroAssembler::TrustedImmPtr(JSValue::encode(jsNumber(numArgs))), addressOfCallData(RegisterFile::ArgumentCount));
1113 m_jit.storePtr(GPRInfo::callFrameRegister, addressOfCallData(RegisterFile::CallerFrame));
// For Construct the second var-arg child is consumed here without being
// stored, and the argument-store loop below starts at index 1 instead of 0.
1115 if (node.op == Construct)
1116 use(m_jit.graph().m_varArgChildren[node.firstChild() + 1]);
1118 for (int argIdx = (node.op == Call ? 0 : 1); argIdx < numArgs; argIdx++) {
1119 NodeIndex argNodeIndex = m_jit.graph().m_varArgChildren[node.firstChild() + 1 + argIdx];
1120 JSValueOperand arg(this, argNodeIndex);
1121 GPRReg argGPR = arg.gpr();
// Arguments live below the new frame header, hence the negative offset.
1124 m_jit.storePtr(argGPR, addressOfCallData(-callDataSize + argIdx));
1127 m_jit.storePtr(calleeGPR, addressOfCallData(RegisterFile::Callee));
1131 GPRResult result(this);
1132 GPRReg resultGPR = result.gpr();
// Patchable inline cache: the linker later patches targetToCheck with the
// expected callee; until then it compares against the empty JSValue and
// always takes the slow path.
1134 JITCompiler::DataLabelPtr targetToCheck;
1135 JITCompiler::Jump slowPath;
1137 slowPath = m_jit.branchPtrWithPatch(MacroAssembler::NotEqual, calleeGPR, targetToCheck, MacroAssembler::TrustedImmPtr(JSValue::encode(JSValue())));
// Fast path: install the callee's scope chain into the new frame header.
1138 m_jit.loadPtr(MacroAssembler::Address(calleeGPR, OBJECT_OFFSETOF(JSFunction, m_scopeChain)), resultGPR);
1139 m_jit.storePtr(resultGPR, addressOfCallData(RegisterFile::ScopeChain));
// Advance the frame pointer past this code block's registers so the callee
// sees its own frame, then make the (patchable) near call.
1141 m_jit.addPtr(Imm32(m_jit.codeBlock()->m_numCalleeRegisters * sizeof(Register)), GPRInfo::callFrameRegister);
1143 JITCompiler::Call fastCall = m_jit.nearCall();
1144 m_jit.notifyCall(fastCall, m_jit.graph()[m_compileIndex].codeOrigin);
1146 JITCompiler::Jump done = m_jit.jump();
// Slow path: call the link stub, which returns the machine code address to
// call; argumentGPR0 carries the prospective new frame pointer.
1148 slowPath.link(&m_jit);
1150 m_jit.addPtr(Imm32(m_jit.codeBlock()->m_numCalleeRegisters * sizeof(Register)), GPRInfo::callFrameRegister, GPRInfo::argumentGPR0);
1151 JITCompiler::Call slowCall = m_jit.appendCallWithFastExceptionCheck(slowCallFunction, m_jit.graph()[m_compileIndex].codeOrigin);
1152 m_jit.move(Imm32(numArgs), GPRInfo::regT1);
1153 m_jit.addPtr(Imm32(m_jit.codeBlock()->m_numCalleeRegisters * sizeof(Register)), GPRInfo::callFrameRegister);
1154 m_jit.notifyCall(m_jit.call(GPRInfo::returnValueGPR), m_jit.graph()[m_compileIndex].codeOrigin);
1158 m_jit.move(GPRInfo::returnValueGPR, resultGPR);
1160 jsValueResult(resultGPR, m_compileIndex, DataFormatJS, UseChildrenCalledExplicitly);
// Record the call site so the linker can patch the inline cache.
1162 m_jit.addJSCall(fastCall, slowCall, targetToCheck, isCall, m_jit.graph()[m_compileIndex].codeOrigin);
// Registers a speculation-failure jump. Only valid on the speculative path:
// the static downcast to SpeculativeJIT is guarded by the m_isSpeculative
// assertion (JITCodeGenerator is the common base of both JIT variants —
// see the class hierarchy includes at the top of this file).
1165 void JITCodeGenerator::speculationCheck(MacroAssembler::Jump jumpToFail)
1167 ASSERT(m_isSpeculative);
1168 static_cast<SpeculativeJIT*>(this)->speculationCheck(jumpToFail);
// Debug helper: maps a DataFormat enum value to a short printable name,
// used by dump() below.
// NOTE(review): the string table's initializers (original lines 1176-1192)
// were lost in this extract; only the declaration and the indexed return
// are visible.
1172 static const char* dataFormatString(DataFormat format)
1174 // These values correspond to the DataFormat enum.
1175 const char* strings[] = {
1193 return strings[format];
// Debug dump of the code generator's register-allocation state: the GPR and
// FPR banks followed by per-virtual-register GenerationInfo (register/spill
// formats and, where applicable, the assigned machine register).
// NOTE(review): the lines that iterate the gpr/fpr banks and the condition
// selecting between the two fprintf formats at 1209/1211 were lost in this
// extract (numbering gaps) — presumably an if/else on whether the virtual
// register is live; confirm against the original file.
1196 void JITCodeGenerator::dump(const char* label)
1199 fprintf(stderr, "<%s>\n", label);
1201 fprintf(stderr, "  gprs:\n");
1203 fprintf(stderr, "  fprs:\n");
1205 fprintf(stderr, "  VirtualRegisters:\n");
1206 for (unsigned i = 0; i < m_generationInfo.size(); ++i) {
1207 GenerationInfo& info = m_generationInfo[i];
// Live register: show its in-register and spill formats...
1209 fprintf(stderr, "    % 3d:%s%s", i, dataFormatString(info.registerFormat()), dataFormatString(info.spillFormat()));
// ...otherwise a placeholder row.
1211 fprintf(stderr, "    % 3d:[__][__]", i);
1212 if (info.registerFormat() == DataFormatDouble)
1213 fprintf(stderr, ":fpr%d\n", info.fpr());
1214 else if (info.registerFormat() != DataFormatNone) {
1215 ASSERT(info.gpr() != InvalidGPRReg);
1216 fprintf(stderr, ":%s\n", GPRInfo::debugName(info.gpr()));
1218 fprintf(stderr, "\n");
1221 fprintf(stderr, "</%s>\n", label);
1226 #if ENABLE(DFG_CONSISTENCY_CHECK)
// Debug-only invariant check: verifies that no register is left locked
// between nodes, and that the virtual-register -> machine-register mapping
// recorded in m_generationInfo agrees with the reverse mapping held by the
// m_gprs / m_fprs register banks, in both directions.
// NOTE(review): closing braces, the `failed = true` statements, and the
// final failure handling were lost in this extract (numbering gaps).
1227 void JITCodeGenerator::checkConsistency()
1229 bool failed = false;
// No GPR or FPR may remain locked once a node has finished generating.
1231 for (gpr_iterator iter = m_gprs.begin(); iter != m_gprs.end(); ++iter) {
1232 if (iter.isLocked()) {
1233 fprintf(stderr, "DFG_CONSISTENCY_CHECK failed: gpr %s is locked.\n", iter.debugName());
1237 for (fpr_iterator iter = m_fprs.begin(); iter != m_fprs.end(); ++iter) {
1238 if (iter.isLocked()) {
1239 fprintf(stderr, "DFG_CONSISTENCY_CHECK failed: fpr %s is locked.\n", iter.debugName());
// Forward direction: every in-register virtual register must be named by
// the bank entry for the machine register it claims to occupy.
1244 for (unsigned i = 0; i < m_generationInfo.size(); ++i) {
1245 VirtualRegister virtualRegister = (VirtualRegister)i;
1246 GenerationInfo& info = m_generationInfo[virtualRegister];
1249 switch (info.registerFormat()) {
1250 case DataFormatNone:
1252 case DataFormatInteger:
1253 case DataFormatCell:
1255 case DataFormatJSInteger:
1256 case DataFormatJSDouble:
1257 case DataFormatJSCell: {
1258 GPRReg gpr = info.gpr();
1259 ASSERT(gpr != InvalidGPRReg);
1260 if (m_gprs.name(gpr) != virtualRegister) {
1261 fprintf(stderr, "DFG_CONSISTENCY_CHECK failed: name mismatch for virtual register %d (gpr %s).\n", virtualRegister, GPRInfo::debugName(gpr));
1266 case DataFormatDouble: {
1267 FPRReg fpr = info.fpr();
1268 ASSERT(fpr != InvalidFPRReg);
1269 if (m_fprs.name(fpr) != virtualRegister) {
1270 fprintf(stderr, "DFG_CONSISTENCY_CHECK failed: name mismatch for virtual register %d (fpr %s).\n", virtualRegister, FPRInfo::debugName(fpr));
// Reverse direction: every named bank entry must point back at a
// GenerationInfo that claims that same machine register.
1278 for (gpr_iterator iter = m_gprs.begin(); iter != m_gprs.end(); ++iter) {
1279 VirtualRegister virtualRegister = iter.name();
1280 if (virtualRegister == InvalidVirtualRegister)
1283 GenerationInfo& info = m_generationInfo[virtualRegister];
1284 if (iter.regID() != info.gpr()) {
1285 fprintf(stderr, "DFG_CONSISTENCY_CHECK failed: name mismatch for gpr %s (virtual register %d).\n", iter.debugName(), virtualRegister);
1290 for (fpr_iterator iter = m_fprs.begin(); iter != m_fprs.end(); ++iter) {
1291 VirtualRegister virtualRegister = iter.name();
1292 if (virtualRegister == InvalidVirtualRegister)
1295 GenerationInfo& info = m_generationInfo[virtualRegister];
1296 if (iter.regID() != info.fpr()) {
1297 fprintf(stderr, "DFG_CONSISTENCY_CHECK failed: name mismatch for fpr %s (virtual register %d).\n", iter.debugName(), virtualRegister);
// RAII scratch GPR: allocates a fresh register for the temporary's lifetime.
// NOTE(review): the leading ": m_jit(jit)" initializer line was lost in this
// extract (numbering gap 1309->1311); same applies to the ctors below.
1309 GPRTemporary::GPRTemporary(JITCodeGenerator* jit)
1311 , m_gpr(InvalidGPRReg)
1313 m_gpr = m_jit->allocate();
// RAII scratch GPR pinned to a specific machine register.
1316 GPRTemporary::GPRTemporary(JITCodeGenerator* jit, GPRReg specific)
1318 , m_gpr(InvalidGPRReg)
1320 m_gpr = m_jit->allocate(specific);
// Scratch GPR that reuses op1's register when the operand is no longer
// needed after this node; otherwise allocates a fresh one.
// NOTE(review): the `else` between the two assignments was lost in this
// extract (numbering gap 1328->1330); likewise in the ctors below.
1323 GPRTemporary::GPRTemporary(JITCodeGenerator* jit, SpeculateIntegerOperand& op1)
1325 , m_gpr(InvalidGPRReg)
1327 if (m_jit->canReuse(op1.index()))
1328 m_gpr = m_jit->reuse(op1.gpr());
1330 m_gpr = m_jit->allocate();
// Scratch GPR preferring to reuse op1's register, then op2's, before
// falling back to a fresh allocation.
1333 GPRTemporary::GPRTemporary(JITCodeGenerator* jit, SpeculateIntegerOperand& op1, SpeculateIntegerOperand& op2)
1335 , m_gpr(InvalidGPRReg)
1337 if (m_jit->canReuse(op1.index()))
1338 m_gpr = m_jit->reuse(op1.gpr());
1339 else if (m_jit->canReuse(op2.index()))
1340 m_gpr = m_jit->reuse(op2.gpr())
1342 m_gpr = m_jit->allocate();
// Scratch GPR reusing an IntegerOperand's register when possible.
1345 GPRTemporary::GPRTemporary(JITCodeGenerator* jit, IntegerOperand& op1)
1347 , m_gpr(InvalidGPRReg)
1349 if (m_jit->canReuse(op1.index()))
1350 m_gpr = m_jit->reuse(op1.gpr());
1352 m_gpr = m_jit->allocate();
// Scratch GPR preferring op1's register, then op2's, else a fresh one.
1355 GPRTemporary::GPRTemporary(JITCodeGenerator* jit, IntegerOperand& op1, IntegerOperand& op2)
1357 , m_gpr(InvalidGPRReg)
1359 if (m_jit->canReuse(op1.index()))
1360 m_gpr = m_jit->reuse(op1.gpr());
1361 else if (m_jit->canReuse(op2.index()))
1362 m_gpr = m_jit->reuse(op2.gpr());
1364 m_gpr = m_jit->allocate();
// Scratch GPR reusing a SpeculateCellOperand's register when possible.
1367 GPRTemporary::GPRTemporary(JITCodeGenerator* jit, SpeculateCellOperand& op1)
1369 , m_gpr(InvalidGPRReg)
1371 if (m_jit->canReuse(op1.index()))
1372 m_gpr = m_jit->reuse(op1.gpr());
1374 m_gpr = m_jit->allocate();
// Scratch GPR reusing a SpeculateBooleanOperand's register when possible.
1377 GPRTemporary::GPRTemporary(JITCodeGenerator* jit, SpeculateBooleanOperand& op1)
1379 , m_gpr(InvalidGPRReg)
1381 if (m_jit->canReuse(op1.index()))
1382 m_gpr = m_jit->reuse(op1.gpr());
1384 m_gpr = m_jit->allocate();
// Scratch GPR reusing a JSValueOperand's register when possible.
1387 GPRTemporary::GPRTemporary(JITCodeGenerator* jit, JSValueOperand& op1)
1389 , m_gpr(InvalidGPRReg)
1391 if (m_jit->canReuse(op1.index()))
1392 m_gpr = m_jit->reuse(op1.gpr());
1394 m_gpr = m_jit->allocate();
// RAII scratch FPR: allocates a fresh floating-point register.
1397 FPRTemporary::FPRTemporary(JITCodeGenerator* jit)
1399 , m_fpr(InvalidFPRReg)
1401 m_fpr = m_jit->fprAllocate();
// Scratch FPR reusing a DoubleOperand's register when possible.
1404 FPRTemporary::FPRTemporary(JITCodeGenerator* jit, DoubleOperand& op1)
1406 , m_fpr(InvalidFPRReg)
1408 if (m_jit->canReuse(op1.index()))
1409 m_fpr = m_jit->reuse(op1.fpr());
1411 m_fpr = m_jit->fprAllocate();
// Scratch FPR preferring op1's register, then op2's, else a fresh one.
1414 FPRTemporary::FPRTemporary(JITCodeGenerator* jit, DoubleOperand& op1, DoubleOperand& op2)
1416 , m_fpr(InvalidFPRReg)
1418 if (m_jit->canReuse(op1.index()))
1419 m_fpr = m_jit->reuse(op1.fpr());
1420 else if (m_jit->canReuse(op2.index()))
1421 m_fpr = m_jit->reuse(op2.fpr());
1423 m_fpr = m_jit->fprAllocate();
// Scratch FPR reusing a SpeculateDoubleOperand's register when possible.
1426 FPRTemporary::FPRTemporary(JITCodeGenerator* jit, SpeculateDoubleOperand& op1)
1428 , m_fpr(InvalidFPRReg)
1430 if (m_jit->canReuse(op1.index()))
1431 m_fpr = m_jit->reuse(op1.fpr());
1433 m_fpr = m_jit->fprAllocate();
// Scratch FPR preferring op1's register, then op2's, else a fresh one.
1436 FPRTemporary::FPRTemporary(JITCodeGenerator* jit, SpeculateDoubleOperand& op1, SpeculateDoubleOperand& op2)
1438 , m_fpr(InvalidFPRReg)
1440 if (m_jit->canReuse(op1.index()))
1441 m_fpr = m_jit->reuse(op1.fpr());
1442 else if (m_jit->canReuse(op2.index()))
1443 m_fpr = m_jit->reuse(op2.fpr());
1445 m_fpr = m_jit->fprAllocate();
1448 } } // namespace JSC::DFG