Commit a147a4d3 authored by fpizlo@apple.com

Inline caches that refer to otherwise dead objects should be cleared

https://bugs.webkit.org/show_bug.cgi?id=72311

Reviewed by Geoff Garen.

DFG code blocks now participate in the weak reference harvester fixpoint
so that they only consider themselves to be live if either they are
currently executing, or their owner is live and all of their weak references
are live. If not, the relevant code blocks are jettisoned.
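
In outline: marking runs to a fixpoint, and each unproved DFG code block
retries its liveness proof as more cells get marked. Here is a minimal
standalone C++ sketch of that loop, using hypothetical stand-in types
(Cell, SketchCodeBlock) rather than the real JSC classes:

    #include <vector>

    struct Cell { bool marked = false; };

    // Hypothetical stand-in for a DFG CodeBlock, not a JSC type.
    struct SketchCodeBlock {
        bool mayBeExecuting = false;          // currently on the stack?
        bool ownerIsMarked = false;           // did the owning executable survive?
        std::vector<Cell*> weakReferences;    // cells the compiled code assumes alive
        std::vector<Cell*> strongReferences;  // cells the block marks once proved live
        bool livenessHasBeenProved = false;   // reset at the start of each GC

        bool provesLiveness() const
        {
            if (mayBeExecuting)
                return true;                  // executing blocks are kept, for simplicity
            if (!ownerIsMarked)
                return false;
            for (Cell* cell : weakReferences) {
                if (!cell->marked)
                    return false;             // a cached object died; block is jettisonable
            }
            return true;
        }
    };

    // The harvester fixpoint: a block that proves liveness marks its strong
    // references, which may let other blocks prove liveness on the next pass,
    // so iterate until a full pass changes nothing. Blocks still unproved at
    // the end are jettisoned and their inline caches cleared.
    void harvestWeakReferences(std::vector<SketchCodeBlock*>& blocks)
    {
        for (bool changed = true; changed;) {
            changed = false;
            for (SketchCodeBlock* block : blocks) {
                if (block->livenessHasBeenProved || !block->provesLiveness())
                    continue;
                block->livenessHasBeenProved = true;
                for (Cell* cell : block->strongReferences)
                    cell->marked = true;
                changed = true;
            }
        }
    }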

Inline caches in both the old JIT and the DFG are now cleared if any of
their references are not marked at the end of a GC.
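
Concretely, the finalization step asks each cache whether every cell it
embeds survived marking, and resets the cache if not. A minimal sketch with
hypothetical types; the machine-code repatching that the real code also does
(via a RepatchBuffer, as in the diffs below) is elided:

    #include <vector>

    struct SketchStructure { bool marked = false; };

    // Hypothetical model of one monomorphic cache; the real StructureStubInfo
    // holds Structures (and chains) per access type in a union.
    struct SketchInlineCache {
        SketchStructure* cachedStructure = nullptr; // held weakly: must not keep it alive

        // Mirrors the visitWeak()/visitWeakReferences() contract in the diffs:
        // false means "something this cache relies on is dead".
        bool visitWeak() const
        {
            return !cachedStructure || cachedStructure->marked;
        }

        void reset() { cachedStructure = nullptr; } // back to the unlinked slow path
    };

    // Run at the end of GC: any cache whose referents died is reset rather
    // than being treated as a strong root.
    void finalizeInlineCaches(std::vector<SketchInlineCache>& caches)
    {
        for (SketchInlineCache& cache : caches) {
            if (!cache.visitWeak())
                cache.reset();
        }
    }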

This is performance-neutral on SunSpider, V8, and Kraken. With the
clear-all-code-on-GC policy that we currently have, it shows a slight
reduction in memory usage. If we turn that policy off, it is easy to
construct a program that causes ToT to exhibit linear heap growth, while
with this patch the heap stays small and constant in size.

* assembler/ARMv7Assembler.h:
(JSC::ARMv7Assembler::readCallTarget):
* assembler/MacroAssemblerARMv7.h:
(JSC::MacroAssemblerARMv7::readCallTarget):
* assembler/MacroAssemblerX86.h:
(JSC::MacroAssemblerX86::readCallTarget):
* assembler/MacroAssemblerX86_64.h:
(JSC::MacroAssemblerX86_64::readCallTarget):
* bytecode/CodeBlock.cpp:
(JSC::CodeBlock::visitAggregate):
(JSC::CodeBlock::performTracingFixpointIteration):
(JSC::CodeBlock::visitWeakReferences):
(JSC::CodeBlock::finalizeUnconditionally):
(JSC::CodeBlock::stronglyVisitStrongReferences):
(JSC::MethodCallLinkInfo::reset):
(JSC::ProgramCodeBlock::jettison):
(JSC::EvalCodeBlock::jettison):
(JSC::FunctionCodeBlock::jettison):
* bytecode/CodeBlock.h:
(JSC::CodeBlock::reoptimize):
(JSC::CodeBlock::shouldImmediatelyAssumeLivenessDuringScan):
* bytecode/Instruction.h:
(JSC::PolymorphicAccessStructureList::visitWeak):
* bytecode/StructureStubInfo.cpp:
(JSC::StructureStubInfo::visitWeakReferences):
* bytecode/StructureStubInfo.h:
(JSC::isGetByIdAccess):
(JSC::isPutByIdAccess):
(JSC::StructureStubInfo::reset):
* dfg/DFGJITCompiler.cpp:
(JSC::DFG::JITCompiler::link):
* dfg/DFGOperations.cpp:
* dfg/DFGRepatch.cpp:
(JSC::DFG::dfgRepatchByIdSelfAccess):
(JSC::DFG::dfgResetGetByID):
(JSC::DFG::dfgResetPutByID):
* dfg/DFGRepatch.h:
(JSC::DFG::dfgResetGetByID):
(JSC::DFG::dfgResetPutByID):
* jit/JIT.h:
* jit/JITPropertyAccess.cpp:
(JSC::JIT::resetPatchGetById):
(JSC::JIT::resetPatchPutById):
* jit/JITPropertyAccess32_64.cpp:
(JSC::JIT::resetPatchGetById):
(JSC::JIT::resetPatchPutById):
* jit/JITStubs.cpp:
(JSC::DEFINE_STUB_FUNCTION):
* jit/JITWriteBarrier.h:
(JSC::JITWriteBarrierBase::clearToMaxUnsigned):



git-svn-id: http://svn.webkit.org/repository/webkit/trunk@100880 268f45cc-cd09-0410-ab3c-d52691b4dbfc
parent 5da687a7
assembler/ARMv7Assembler.h
@@ -1894,6 +1894,11 @@ public:
setPointer(reinterpret_cast<uint16_t*>(from) - 1, to);
}
static void* readCallTarget(void* from)
{
return readPointer(reinterpret_cast<uint16_t*>(from) - 1);
}
static void repatchInt32(void* where, int32_t value)
{
assembler/MacroAssemblerARMv7.h
@@ -1561,6 +1561,11 @@ protected:
{
return static_cast<ARMv7Assembler::Condition>(cond);
}
static FunctionPtr readCallTarget(CodeLocationCall call)
{
return ARMv7Assembler::readCallTarget(call.dataLocation());
}
private:
friend class LinkBuffer;
assembler/MacroAssemblerX86.h
@@ -205,6 +205,12 @@ public:
static bool supportsFloatingPointTruncate() { return isSSE2Present(); }
static bool supportsFloatingPointSqrt() { return isSSE2Present(); }
static bool supportsFloatingPointAbs() { return isSSE2Present(); }
static FunctionPtr readCallTarget(CodeLocationCall call)
{
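// A near call on x86 stores a 32-bit displacement relative to the end of the
// call instruction. dataLocation() points just past that displacement, so the
// target is dataLocation() plus the rel32 read from the preceding four bytes;
// e.g. a call ending at 0x1005 with rel32 0xffb targets 0x2000.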
intptr_t offset = reinterpret_cast<int32_t*>(call.dataLocation())[-1];
return FunctionPtr(reinterpret_cast<void*>(reinterpret_cast<uintptr_t>(call.dataLocation()) + offset));
}
private:
friend class LinkBuffer;
assembler/MacroAssemblerX86_64.h
@@ -497,6 +497,11 @@ public:
static bool supportsFloatingPointTruncate() { return true; }
static bool supportsFloatingPointSqrt() { return true; }
static bool supportsFloatingPointAbs() { return true; }
static FunctionPtr readCallTarget(CodeLocationCall call)
{
return FunctionPtr(X86Assembler::readPointer(call.dataLabelPtrAtOffset(-REPTACH_OFFSET_CALL_R11).dataLocation()));
}
private:
friend class LinkBuffer;
bytecode/CodeBlock.h
@@ -171,6 +171,8 @@ namespace JSC {
{
seen = true;
}
void reset(RepatchBuffer&, JITCode::JITType);
unsigned bytecodeIndex;
CodeLocationCall callReturnLocation;
@@ -250,7 +252,7 @@ namespace JSC {
}
#endif
-class CodeBlock : public UnconditionalFinalizer {
+class CodeBlock : public UnconditionalFinalizer, public WeakReferenceHarvester {
WTF_MAKE_FAST_ALLOCATED;
friend class JIT;
public:
@@ -294,10 +296,6 @@ namespace JSC {
bool canProduceCopyWithBytecode() { return hasInstructions(); }
void visitAggregate(SlotVisitor&);
-// Call this if you are not jettisoning a code block, and thus
-// have no evidence to suggest that it will never be called into again.
-void stronglyVisitWeakReferences(SlotVisitor&);
static void dumpStatistics();
@@ -537,7 +535,7 @@ namespace JSC {
JITCode::JITType getJITType() { return m_jitCode.jitType(); }
ExecutableMemoryHandle* executableMemory() { return getJITCode().getExecutableMemory(); }
virtual JSObject* compileOptimized(ExecState*, ScopeChainNode*) = 0;
-virtual void jettison(JSGlobalData&) = 0;
+virtual void jettison() = 0;
virtual CodeBlock* replacement() = 0;
virtual bool canCompileWithDFG() = 0;
bool hasOptimizedReplacement()
@@ -1060,10 +1058,10 @@ namespace JSC {
#endif
#if ENABLE(JIT)
-void reoptimize(JSGlobalData& globalData)
+void reoptimize()
{
ASSERT(replacement() != this);
-replacement()->jettison(globalData);
+replacement()->jettison();
countReoptimization();
optimizeAfterWarmUp();
}
@@ -1085,6 +1083,7 @@ namespace JSC {
bool m_shouldDiscardBytecode;
protected:
virtual void visitWeakReferences(SlotVisitor&);
virtual void finalizeUnconditionally();
private:
@@ -1101,6 +1100,33 @@ namespace JSC {
void printPutByIdOp(ExecState*, int location, Vector<Instruction>::const_iterator&, const char* op) const;
#endif
void visitStructures(SlotVisitor&, Instruction* vPC) const;
#if ENABLE(DFG_JIT)
bool shouldImmediatelyAssumeLivenessDuringScan()
{
// Null m_dfgData means that this is a baseline JIT CodeBlock. Baseline JIT
// CodeBlocks don't need to be jettisoned when their weak references go
// stale. So if a baseline JIT CodeBlock gets scanned, we can assume that
// this means that it's live.
if (!m_dfgData)
return true;
// For simplicity, we don't attempt to jettison code blocks during GC if
// they are executing. Instead we strongly mark their weak references to
// allow them to continue to execute soundly.
if (m_dfgData->mayBeExecuting)
return true;
return false;
}
#else
bool shouldImmediatelyAssumeLivenessDuringScan() { return true; }
#endif
void performTracingFixpointIteration(SlotVisitor&);
void stronglyVisitStrongReferences(SlotVisitor&);
void stronglyVisitWeakReferences(SlotVisitor&);
void createRareDataIfNecessary()
{
@@ -1177,6 +1203,8 @@ namespace JSC {
Vector<WriteBarrier<JSCell> > weakReferences;
bool mayBeExecuting;
bool isJettisoned;
bool livenessHasBeenProved; // Initialized and used on every GC.
bool allTransitionsHaveBeenMarked; // Initialized and used on every GC.
};
OwnPtr<DFGData> m_dfgData;
@@ -1278,7 +1306,7 @@ namespace JSC {
#if ENABLE(JIT)
protected:
virtual JSObject* compileOptimized(ExecState*, ScopeChainNode*);
-virtual void jettison(JSGlobalData&);
+virtual void jettison();
virtual CodeBlock* replacement();
virtual bool canCompileWithDFG();
#endif
@@ -1312,7 +1340,7 @@ namespace JSC {
#if ENABLE(JIT)
protected:
virtual JSObject* compileOptimized(ExecState*, ScopeChainNode*);
-virtual void jettison(JSGlobalData&);
+virtual void jettison();
virtual CodeBlock* replacement();
virtual bool canCompileWithDFG();
#endif
@@ -1349,7 +1377,7 @@ namespace JSC {
#if ENABLE(JIT)
protected:
virtual JSObject* compileOptimized(ExecState*, ScopeChainNode*);
-virtual void jettison(JSGlobalData&);
+virtual void jettison();
virtual CodeBlock* replacement();
virtual bool canCompileWithDFG();
#endif
bytecode/Instruction.h
@@ -114,7 +114,7 @@ namespace JSC {
list[0].set(globalData, owner, stubRoutine, firstBase, firstChain, isDirect);
}
-void visitAggregate(SlotVisitor& visitor, int count)
+bool visitWeak(int count)
{
for (int i = 0; i < count; ++i) {
PolymorphicStubInfo& info = list[i];
@@ -124,12 +124,17 @@ namespace JSC {
continue;
}
-visitor.append(&info.base);
-if (info.u.proto && !info.isChain)
-visitor.append(&info.u.proto);
-if (info.u.chain && info.isChain)
-visitor.append(&info.u.chain);
+if (!Heap::isMarked(info.base.get()))
+return false;
+if (info.u.proto && !info.isChain
+&& !Heap::isMarked(info.u.proto.get()))
+return false;
+if (info.u.chain && info.isChain
+&& !Heap::isMarked(info.u.chain.get()))
+return false;
}
+return true;
}
};
bytecode/StructureStubInfo.cpp
@@ -62,48 +62,55 @@ void StructureStubInfo::deref()
}
}
-void StructureStubInfo::visitAggregate(SlotVisitor& visitor)
+bool StructureStubInfo::visitWeakReferences()
{
switch (accessType) {
case access_get_by_id_self:
-visitor.append(&u.getByIdSelf.baseObjectStructure);
-return;
+if (!Heap::isMarked(u.getByIdSelf.baseObjectStructure.get()))
+return false;
+break;
case access_get_by_id_proto:
-visitor.append(&u.getByIdProto.baseObjectStructure);
-visitor.append(&u.getByIdProto.prototypeStructure);
-return;
+if (!Heap::isMarked(u.getByIdProto.baseObjectStructure.get())
+|| !Heap::isMarked(u.getByIdProto.prototypeStructure.get()))
+return false;
+break;
case access_get_by_id_chain:
-visitor.append(&u.getByIdChain.baseObjectStructure);
-visitor.append(&u.getByIdChain.chain);
-return;
+if (!Heap::isMarked(u.getByIdChain.baseObjectStructure.get())
+|| !Heap::isMarked(u.getByIdChain.chain.get()))
+return false;
+break;
case access_get_by_id_self_list: {
PolymorphicAccessStructureList* polymorphicStructures = u.getByIdSelfList.structureList;
-polymorphicStructures->visitAggregate(visitor, u.getByIdSelfList.listSize);
-return;
+if (!polymorphicStructures->visitWeak(u.getByIdSelfList.listSize)) {
+delete polymorphicStructures;
+return false;
+}
+break;
}
case access_get_by_id_proto_list: {
PolymorphicAccessStructureList* polymorphicStructures = u.getByIdProtoList.structureList;
-polymorphicStructures->visitAggregate(visitor, u.getByIdProtoList.listSize);
-return;
+if (!polymorphicStructures->visitWeak(u.getByIdProtoList.listSize)) {
+delete polymorphicStructures;
+return false;
+}
+break;
}
case access_put_by_id_transition:
-visitor.append(&u.putByIdTransition.previousStructure);
-visitor.append(&u.putByIdTransition.structure);
-visitor.append(&u.putByIdTransition.chain);
-return;
+if (!Heap::isMarked(u.putByIdTransition.previousStructure.get())
+|| !Heap::isMarked(u.putByIdTransition.structure.get())
+|| !Heap::isMarked(u.putByIdTransition.chain.get()))
+return false;
+break;
case access_put_by_id_replace:
-visitor.append(&u.putByIdReplace.baseObjectStructure);
-return;
-case access_unset:
-case access_get_by_id_generic:
-case access_put_by_id_generic:
-case access_get_array_length:
-case access_get_string_length:
-// These instructions don't need to mark anything
-return;
+if (!Heap::isMarked(u.putByIdReplace.baseObjectStructure.get()))
+return false;
+break;
default:
-ASSERT_NOT_REACHED();
+// The rest of the instructions don't require references, so there is no need to
+// do anything.
+break;
}
+return true;
}
#endif
bytecode/StructureStubInfo.h
@@ -50,6 +50,35 @@ namespace JSC {
access_get_string_length,
};
inline bool isGetByIdAccess(AccessType accessType)
{
switch (accessType) {
case access_get_by_id_self:
case access_get_by_id_proto:
case access_get_by_id_chain:
case access_get_by_id_self_list:
case access_get_by_id_proto_list:
case access_get_by_id_generic:
case access_get_array_length:
case access_get_string_length:
return true;
default:
return false;
}
}
inline bool isPutByIdAccess(AccessType accessType)
{
switch (accessType) {
case access_put_by_id_transition:
case access_put_by_id_replace:
case access_put_by_id_generic:
return true;
default:
return false;
}
}
struct StructureStubInfo {
StructureStubInfo()
: accessType(access_unset)
@@ -113,10 +142,18 @@ namespace JSC {
u.putByIdReplace.baseObjectStructure.set(globalData, owner, baseObjectStructure);
}
void reset()
{
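// Forget the cached access entirely; callers also repatch the machine code
// (JIT::resetPatchGetById/PutById, dfgResetGetByID/PutByID) so execution
// falls back to the slow path until the cache is repopulated.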
accessType = access_unset;
stubRoutine = MacroAssemblerCodeRef();
}
void deref();
-void visitAggregate(SlotVisitor&);
+bool visitWeakReferences();
bool seenOnce()
{
return seen;
@@ -142,17 +179,18 @@ namespace JSC {
int16_t deltaCallToDone;
int16_t deltaCallToStructCheck;
int16_t deltaCallToSlowCase;
+int16_t deltaCheckImmToCall;
+#if USE(JSVALUE64)
+int16_t deltaCallToLoadOrStore;
+#else
+int16_t deltaCallToTagLoadOrStore;
+int16_t deltaCallToPayloadLoadOrStore;
+#endif
#endif // ENABLE(DFG_JIT)
union {
struct {
-int16_t deltaCheckImmToCall;
-#if USE(JSVALUE64)
-int16_t deltaCallToLoadOrStore;
-#elif USE(JSVALUE32_64)
-int16_t deltaCallToTagLoadOrStore;
-int16_t deltaCallToPayloadLoadOrStore;
-#endif
+// It would be unwise to put anything here, as it will surely be overwritten.
} unset;
struct {
WriteBarrierBase<Structure> baseObjectStructure;
dfg/DFGJITCompiler.cpp
@@ -157,13 +157,13 @@ void JITCompiler::link(LinkBuffer& linkBuffer)
StructureStubInfo& info = m_codeBlock->structureStubInfo(i);
CodeLocationCall callReturnLocation = linkBuffer.locationOf(m_propertyAccesses[i].m_functionCall);
info.callReturnLocation = callReturnLocation;
-info.u.unset.deltaCheckImmToCall = differenceBetweenCodePtr(linkBuffer.locationOf(m_propertyAccesses[i].m_deltaCheckImmToCall), callReturnLocation);
+info.deltaCheckImmToCall = differenceBetweenCodePtr(linkBuffer.locationOf(m_propertyAccesses[i].m_deltaCheckImmToCall), callReturnLocation);
info.deltaCallToStructCheck = differenceBetweenCodePtr(callReturnLocation, linkBuffer.locationOf(m_propertyAccesses[i].m_deltaCallToStructCheck));
#if USE(JSVALUE64)
-info.u.unset.deltaCallToLoadOrStore = differenceBetweenCodePtr(callReturnLocation, linkBuffer.locationOf(m_propertyAccesses[i].m_deltaCallToLoadOrStore));
+info.deltaCallToLoadOrStore = differenceBetweenCodePtr(callReturnLocation, linkBuffer.locationOf(m_propertyAccesses[i].m_deltaCallToLoadOrStore));
#else
-info.u.unset.deltaCallToTagLoadOrStore = differenceBetweenCodePtr(callReturnLocation, linkBuffer.locationOf(m_propertyAccesses[i].m_deltaCallToTagLoadOrStore));
-info.u.unset.deltaCallToPayloadLoadOrStore = differenceBetweenCodePtr(callReturnLocation, linkBuffer.locationOf(m_propertyAccesses[i].m_deltaCallToPayloadLoadOrStore));
+info.deltaCallToTagLoadOrStore = differenceBetweenCodePtr(callReturnLocation, linkBuffer.locationOf(m_propertyAccesses[i].m_deltaCallToTagLoadOrStore));
+info.deltaCallToPayloadLoadOrStore = differenceBetweenCodePtr(callReturnLocation, linkBuffer.locationOf(m_propertyAccesses[i].m_deltaCallToPayloadLoadOrStore));
#endif
info.deltaCallToSlowCase = differenceBetweenCodePtr(callReturnLocation, linkBuffer.locationOf(m_propertyAccesses[i].m_deltaCallToSlowCase));
info.deltaCallToDone = differenceBetweenCodePtr(callReturnLocation, linkBuffer.locationOf(m_propertyAccesses[i].m_deltaCallToDone));
dfg/DFGOperations.cpp
@@ -325,8 +325,9 @@ EncodedJSValue DFG_OPERATION operationGetMethodOptimizeWithReturnAddress(ExecSta
JSValue baseValue(base);
PropertySlot slot(baseValue);
JSValue result = baseValue.get(exec, *propertyName, slot);
-MethodCallLinkInfo& methodInfo = exec->codeBlock()->getMethodCallLinkInfo(returnAddress);
+CodeBlock* codeBlock = exec->codeBlock();
+MethodCallLinkInfo& methodInfo = codeBlock->getMethodCallLinkInfo(returnAddress);
if (methodInfo.seenOnce())
dfgRepatchGetMethod(exec, baseValue, *propertyName, slot, methodInfo);
else
@@ -367,7 +368,7 @@ EncodedJSValue DFG_OPERATION operationGetByIdOptimizeWithReturnAddress(ExecState
JSValue baseValue(base);
PropertySlot slot(baseValue);
JSValue result = baseValue.get(exec, *propertyName, slot);
StructureStubInfo& stubInfo = exec->codeBlock()->getStubInfo(returnAddress);
if (stubInfo.seen)
dfgRepatchGetByID(exec, baseValue, *propertyName, slot, stubInfo);
dfg/DFGRepatch.cpp
@@ -49,19 +49,19 @@ static void dfgRepatchByIdSelfAccess(CodeBlock* codeBlock, StructureStubInfo& st
repatchBuffer.relink(stubInfo.callReturnLocation, slowPathFunction);
// Patch the structure check & the offset of the load.
-repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelPtrAtOffset(-(intptr_t)stubInfo.u.unset.deltaCheckImmToCall), structure);
+repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelPtrAtOffset(-(intptr_t)stubInfo.deltaCheckImmToCall), structure);
#if USE(JSVALUE64)
if (compact)
-repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.u.unset.deltaCallToLoadOrStore), sizeof(JSValue) * offset);
+repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.deltaCallToLoadOrStore), sizeof(JSValue) * offset);
else
-repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.u.unset.deltaCallToLoadOrStore), sizeof(JSValue) * offset);
+repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.deltaCallToLoadOrStore), sizeof(JSValue) * offset);
#elif USE(JSVALUE32_64)
if (compact) {
-repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.u.unset.deltaCallToTagLoadOrStore), sizeof(JSValue) * offset + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
-repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.u.unset.deltaCallToPayloadLoadOrStore), sizeof(JSValue) * offset + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
+repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.deltaCallToTagLoadOrStore), sizeof(JSValue) * offset + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
+repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.deltaCallToPayloadLoadOrStore), sizeof(JSValue) * offset + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
} else {
-repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.u.unset.deltaCallToTagLoadOrStore), sizeof(JSValue) * offset + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
-repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.u.unset.deltaCallToPayloadLoadOrStore), sizeof(JSValue) * offset + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
+repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.deltaCallToTagLoadOrStore), sizeof(JSValue) * offset + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
+repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.deltaCallToPayloadLoadOrStore), sizeof(JSValue) * offset + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
}
#endif
}
@@ -651,6 +651,44 @@ void dfgLinkFor(ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCo
repatchBuffer.relink(CodeLocationCall(callLinkInfo.callReturnLocation), operationVirtualConstruct);
}
void dfgResetGetByID(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
{
repatchBuffer.relink(stubInfo.callReturnLocation, operationGetByIdOptimize);
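// Poison the inline structure check with -1 (no real Structure lives there),
// so the fast path always fails until the cache is repopulated; the cached
// load offsets are likewise zeroed below.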
repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelPtrAtOffset(-(uintptr_t)stubInfo.deltaCheckImmToCall), reinterpret_cast<void*>(-1));
#if USE(JSVALUE64)
repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.deltaCallToLoadOrStore), 0);
#else
repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.deltaCallToTagLoadOrStore), 0);
repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.deltaCallToPayloadLoadOrStore), 0);
#endif
repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.deltaCallToStructCheck), stubInfo.callReturnLocation.labelAtOffset(stubInfo.deltaCallToSlowCase));
}
void dfgResetPutByID(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
{
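// The stub does not record which put_by_id flavor it was linked against, so
// read the currently linked slow-path call target back out of the machine
// code and map it to the corresponding *Optimize entry point.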
V_DFGOperation_EJCI unoptimizedFunction = bitwise_cast<V_DFGOperation_EJCI>(MacroAssembler::readCallTarget(stubInfo.callReturnLocation).executableAddress());
V_DFGOperation_EJCI optimizedFunction;
if (unoptimizedFunction == operationPutByIdStrict)
optimizedFunction = operationPutByIdStrictOptimize;
else if (unoptimizedFunction == operationPutByIdNonStrict)
optimizedFunction = operationPutByIdNonStrictOptimize;
else if (unoptimizedFunction == operationPutByIdDirectStrict)
optimizedFunction = operationPutByIdDirectStrictOptimize;
else {
ASSERT(unoptimizedFunction == operationPutByIdDirectNonStrict);
optimizedFunction = operationPutByIdDirectNonStrictOptimize;
}
repatchBuffer.relink(stubInfo.callReturnLocation, optimizedFunction);
repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelPtrAtOffset(-(uintptr_t)stubInfo.deltaCheckImmToCall), reinterpret_cast<void*>(-1));
#if USE(JSVALUE64)
repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.deltaCallToLoadOrStore), 0);
#else
repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.deltaCallToTagLoadOrStore), 0);
repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.deltaCallToPayloadLoadOrStore), 0);
#endif
repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.deltaCallToStructCheck), stubInfo.callReturnLocation.labelAtOffset(stubInfo.deltaCallToSlowCase));
}
} } // namespace JSC::DFG
#endif
dfg/DFGRepatch.h
@@ -39,8 +39,21 @@ void dfgBuildGetByIDList(ExecState*, JSValue, const Identifier&, const PropertyS
void dfgBuildGetByIDProtoList(ExecState*, JSValue, const Identifier&, const PropertySlot&, StructureStubInfo&);
void dfgRepatchPutByID(ExecState*, JSValue, const Identifier&, const PutPropertySlot&, StructureStubInfo&, PutKind);
void dfgLinkFor(ExecState*, CallLinkInfo&, CodeBlock*, JSFunction* callee, MacroAssemblerCodePtr, CodeSpecializationKind);
+void dfgResetGetByID(RepatchBuffer&, StructureStubInfo&);
+void dfgResetPutByID(RepatchBuffer&, StructureStubInfo&);
} } // namespace JSC::DFG
-#endif
-#endif
+#else // ENABLE(DFG_JIT)
+#include <wtf/Assertions.h>
+namespace JSC { namespace DFG {
+void dfgResetGetByID(RepatchBuffer&, StructureStubInfo&) { ASSERT_NOT_REACHED(); }
+void dfgResetPutByID(RepatchBuffer&, StructureStubInfo&) { ASSERT_NOT_REACHED(); }
+} } // namespace JSC::DFG
+#endif // ENABLE(DFG_JIT)
+#endif // DFGRepatch_h
jit/JIT.h
@@ -254,6 +254,8 @@ namespace JSC {
return jit.privateCompileCTINativeCall(globalData, func);
}
static void resetPatchGetById(RepatchBuffer&, StructureStubInfo*);
static void resetPatchPutById(RepatchBuffer&, StructureStubInfo*);
static void patchGetByIdSelf(CodeBlock* codeblock, StructureStubInfo*, Structure*, size_t cachedOffset, ReturnAddressPtr returnAddress);
static void patchPutByIdReplace(CodeBlock* codeblock, StructureStubInfo*, Structure*, size_t cachedOffset, ReturnAddressPtr returnAddress, bool direct);
static void patchMethodCallProto(JSGlobalData&, CodeBlock* codeblock, MethodCallLinkInfo&, JSObject*, Structure*, JSObject*, ReturnAddressPtr);
jit/JITPropertyAccess.cpp
@@ -1044,6 +1044,21 @@ void JIT::emit_op_put_global_var(Instruction* currentInstruction)
emitWriteBarrier(globalObject, regT0, regT2, ShouldFilterImmediates, WriteBarrierForVariableAccess);
}
void JIT::resetPatchGetById(RepatchBuffer& repatchBuffer, StructureStubInfo* stubInfo)
{
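// Same recipe as dfgResetGetByID above: relink the call to the generic
// cti_op_get_by_id, poison the inline structure check with -1, zero the
// cached offset, and point the structure-check branch at the slow case.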
repatchBuffer.relink(stubInfo->callReturnLocation, cti_op_get_by_id);
repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelPtrAtOffset(patchOffsetGetByIdStructure), reinterpret_cast<void*>(-1));
repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelCompactAtOffset(patchOffsetGetByIdPropertyMapOffset), 0);
repatchBuffer.relink(stubInfo->hotPathBegin.jumpAtOffset(patchOffsetGetByIdBranchToSlowCase), stubInfo->callReturnLocation.labelAtOffset(-patchOffsetGetByIdSlowCaseCall));
}