 * Copyright (C) 2008 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30 #include "CallFrame.h"
32 #include "MacroAssemblerCodeRef.h"
45 typedef MacroAssemblerCodeRef CodeRef;
46 typedef MacroAssemblerCodePtr CodePtr;
// Identifies which tier of the JIT produced a blob of code.
// HostCallThunk is used for host-function trampolines (see HostFunction()
// below); BaselineJIT and DFGJIT are the actual compilation tiers.
// Enumerator order matters: tier promotion logic compares/advances these.
enum JITType { HostCallThunk, BaselineJIT, DFGJIT };
// NOTE(review): this excerpt is mangled — original file line numbers are fused
// onto the front of each line, and the function bodies/braces are missing.
// Comments below describe only what is visible; TODO reconcile against the
// complete original file before building.
// Lowest JIT tier a code block starts at (body not visible in this excerpt;
// presumably returns BaselineJIT when tiered compilation is enabled).
53 static JITType bottomTierJIT()
55 #if ENABLE(TIERED_COMPILATION)
// Highest tier available for promotion (body not visible in this excerpt).
62 static JITType topTierJIT()
// Returns the tier to promote to from 'jitType'. The assertion requires the
// input to be a real compilation tier (Baseline or DFG) — never HostCallThunk.
// ASSERT_UNUSED also silences the unused-parameter warning in release builds.
67 static JITType nextTierJIT(JITType jitType)
69 ASSERT_UNUSED(jitType, jitType == BaselineJIT || jitType == DFGJIT);
// Constructs a JITCode from a code reference plus the tier that produced it
// (initializer list/body not visible in this excerpt).
78 JITCode(const CodeRef ref, JITType jitType)
// Logical-not — presumably true when no code pointer has been set; body not
// visible here, so TODO confirm against the original.
84 bool operator !() const
// Address callers should jump to to invoke this code (body not visible here).
89 CodePtr addressForCall()
94 // This function returns the offset in bytes of 'pointerIntoCode' into
95 // this block of code. The pointer provided must be a pointer into this
96 // block of code. It is ASSERTed that no codeblock >4gb in size.
97 unsigned offsetOf(void* pointerIntoCode)
99 intptr_t result = reinterpret_cast<intptr_t>(pointerIntoCode) - reinterpret_cast<intptr_t>(m_ref.code().executableAddress());
100 ASSERT(static_cast<intptr_t>(static_cast<unsigned>(result)) == result);
101 return static_cast<unsigned>(result);
105 inline JSValue execute(RegisterFile* registerFile, CallFrame* callFrame, JSGlobalData* globalData)
107 JSValue result = JSValue::decode(ctiTrampoline(m_ref.code().executableAddress(), registerFile, callFrame, 0, Profiler::enabledProfilerReference(), globalData));
108 return globalData->exception ? jsNull() : result;
// NOTE(review): the two lines below are orphaned bodies of accessors whose
// signatures are missing from this mangled excerpt (the fused numeric
// prefixes are extraction artifacts, not code). The first presumably belongs
// to a start()/data accessor; the second guards some accessor against a null
// code pointer. TODO reconcile with the complete original file.
113 return m_ref.code().dataLocation();
118 ASSERT(m_ref.code().executableAddress());
122 ExecutableMemoryHandle* getExecutableMemory()
124 return m_ref.executableMemory();
132 // Host functions are a bit special; they have a m_code pointer but they
133 // do not individully ref the executable pool containing the trampoline.
134 static JITCode HostFunction(CodeRef code)
136 return JITCode(code, HostCallThunk);
// NOTE(review): orphaned line from this mangled excerpt — presumably the body
// of a clear()/reset method. Placement-new constructs a fresh empty CodeRef
// on top of m_ref, dropping the old reference without running m_ref's
// destructor separately. TODO confirm the enclosing method in the original.
142 new (&m_ref) CodeRef();
// Private constructor: adopts ownership of an executable-memory handle for
// the given tier. The initializer list is truncated by this excerpt
// (presumably it continues with the jit-type member) and the body/braces are
// missing — TODO reconcile with the complete original file. The fused numeric
// prefixes are extraction artifacts, not code.
146 JITCode(PassRefPtr<ExecutableMemoryHandle> executableMemory, JITType jitType)
147 : m_ref(executableMemory)
154 #endif // ENABLE(JIT)