Jit64: Keep track of free code regions and reuse space when possible.
parent fdab9783c7
commit 306a5e6990
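The idea behind the change, in brief: the JIT keeps a set of free [begin, end) ranges for each code region (near and far), emits new blocks into the largest free range, removes the bytes actually written from the set, and hands a block's ranges back when it is invalidated, so space can be reused without flushing the whole cache. The sketch below is a minimal, self-contained illustration of that bookkeeping; the FreeRangeSet class, its member names, and the demo flow in main() are illustrative assumptions, not the HyoutaUtilities::RangeSizeSet API that the commit actually uses.

// Minimal sketch (not Dolphin code) of the free-range bookkeeping described above.
// Assumption: "FreeRangeSet" is a simplified stand-in for the real range-size-set.

#include <cstdint>
#include <cstdio>
#include <map>
#include <optional>
#include <utility>

class FreeRangeSet
{
public:
  // Mark [begin, end) as free. The real container also merges adjacent ranges;
  // this sketch keeps them separate for brevity.
  void Insert(std::uint8_t* begin, std::uint8_t* end) { m_ranges[begin] = end; }

  // Mark [begin, end) as used, assuming it lies entirely inside one free range.
  void Erase(std::uint8_t* begin, std::uint8_t* end)
  {
    auto it = m_ranges.upper_bound(begin);
    if (it == m_ranges.begin())
      return;
    --it;  // now it->first <= begin
    std::uint8_t* const range_begin = it->first;
    std::uint8_t* const range_end = it->second;
    if (end > range_end)
      return;  // not fully contained; the real container handles this properly
    m_ranges.erase(it);
    if (range_begin < begin)
      m_ranges[range_begin] = begin;  // keep the part left of the used span
    if (end < range_end)
      m_ranges[end] = range_end;  // keep the part right of the used span
  }

  // Largest free range, if any; this is what the emitter would be pointed at.
  std::optional<std::pair<std::uint8_t*, std::uint8_t*>> Largest() const
  {
    std::optional<std::pair<std::uint8_t*, std::uint8_t*>> best;
    for (const auto& [begin, end] : m_ranges)
    {
      if (!best || end - begin > best->second - best->first)
        best = std::make_pair(begin, end);
    }
    return best;
  }

private:
  std::map<std::uint8_t*, std::uint8_t*> m_ranges;  // begin -> end, disjoint
};

int main()
{
  static std::uint8_t region[1024];  // stand-in for a JIT code region

  FreeRangeSet free_ranges;
  free_ranges.Insert(region, region + sizeof(region));  // whole region starts out free

  // "Emit" a 100-byte block into the largest free range, then mark those bytes used.
  auto largest = free_ranges.Largest();
  std::uint8_t* const block_begin = largest->first;
  std::uint8_t* const block_end = block_begin + 100;
  free_ranges.Erase(block_begin, block_end);

  // Invalidating the block later returns its bytes to the free set for reuse.
  free_ranges.Insert(block_begin, block_end);

  const auto after = free_ranges.Largest();
  std::printf("largest free range: %td bytes\n", after->second - after->first);
  return 0;
}

Searching by size is what keeps reuse cheap here: per the diff, the real set exposes that ordering directly through by_size_begin()/by_size_end(), so the JIT does not have to scan the way the sketch's Largest() does.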
@@ -376,17 +376,30 @@ void Jit64::Init()
   code_block.m_gpa = &js.gpa;
   code_block.m_fpa = &js.fpa;
   EnableOptimization();
+
+  ResetFreeMemoryRanges();
 }

 void Jit64::ClearCache()
 {
   blocks.Clear();
+  blocks.ClearRangesToFree();
   trampolines.ClearCodeSpace();
   m_far_code.ClearCodeSpace();
   m_const_pool.Clear();
   ClearCodeSpace();
   Clear();
   UpdateMemoryOptions();
+  ResetFreeMemoryRanges();
 }

+void Jit64::ResetFreeMemoryRanges()
+{
+  // Set the entire near and far code regions as unused.
+  m_free_ranges_near.clear();
+  m_free_ranges_near.insert(region, region + region_size);
+  m_free_ranges_far.clear();
+  m_free_ranges_far.insert(m_far_code.GetWritableCodePtr(), m_far_code.GetWritableCodeEnd());
+}
+
 void Jit64::Shutdown()
@@ -721,6 +734,11 @@ void Jit64::Trace()
 }

 void Jit64::Jit(u32 em_address)
 {
+  Jit(em_address, true);
+}
+
+void Jit64::Jit(u32 em_address, bool clear_cache_and_retry_on_failure)
+{
   if (m_cleanup_after_stackfault)
   {
@@ -732,18 +750,23 @@ void Jit64::Jit(u32 em_address)
 #endif
   }

-  if (IsAlmostFull() || m_far_code.IsAlmostFull() || trampolines.IsAlmostFull() ||
-      SConfig::GetInstance().bJITNoBlockCache)
+  if (trampolines.IsAlmostFull() || SConfig::GetInstance().bJITNoBlockCache)
   {
     if (!SConfig::GetInstance().bJITNoBlockCache)
     {
-      const auto reason =
-          IsAlmostFull() ? "main" : m_far_code.IsAlmostFull() ? "far" : "trampoline";
-      WARN_LOG(POWERPC, "flushing %s code cache, please report if this happens a lot", reason);
+      WARN_LOG(POWERPC, "flushing trampoline code cache, please report if this happens a lot");
     }
     ClearCache();
   }

+  // Check if any code blocks have been freed in the block cache and transfer this information to
+  // the local rangesets to allow overwriting them with new code.
+  for (auto range : blocks.GetRangesToFreeNear())
+    m_free_ranges_near.insert(range.first, range.second);
+  for (auto range : blocks.GetRangesToFreeFar())
+    m_free_ranges_far.insert(range.first, range.second);
+  blocks.ClearRangesToFree();
+
   std::size_t block_size = m_code_buffer.size();

   if (SConfig::GetInstance().bEnableDebugging)
@@ -786,12 +809,75 @@ void Jit64::Jit(u32 em_address)
     return;
   }

-  JitBlock* b = blocks.AllocateBlock(em_address);
-  DoJit(em_address, b, nextPC);
-  blocks.FinalizeBlock(*b, jo.enableBlocklink, code_block.m_physical_addresses);
+  if (SetEmitterStateToFreeCodeRegion())
+  {
+    u8* near_start = GetWritableCodePtr();
+    u8* far_start = m_far_code.GetWritableCodePtr();
+
+    JitBlock* b = blocks.AllocateBlock(em_address);
+    if (DoJit(em_address, b, nextPC))
+    {
+      // Code generation succeeded.
+
+      // Mark the memory regions that this code block uses as used in the local rangesets.
+      u8* near_end = GetWritableCodePtr();
+      if (near_start != near_end)
+        m_free_ranges_near.erase(near_start, near_end);
+      u8* far_end = m_far_code.GetWritableCodePtr();
+      if (far_start != far_end)
+        m_free_ranges_far.erase(far_start, far_end);
+
+      // Store the used memory regions in the block so we know what to mark as unused when the
+      // block gets invalidated.
+      b->near_begin = near_start;
+      b->near_end = near_end;
+      b->far_begin = far_start;
+      b->far_end = far_end;
+
+      blocks.FinalizeBlock(*b, jo.enableBlocklink, code_block.m_physical_addresses);
+      return;
+    }
+  }
+
+  if (clear_cache_and_retry_on_failure)
+  {
+    // Code generation failed due to not enough free space in either the near or far code regions.
+    // Clear the entire JIT cache and retry.
+    WARN_LOG(POWERPC, "flushing code caches, please report if this happens a lot");
+    ClearCache();
+    Jit(em_address, false);
+    return;
+  }
+
+  PanicAlertT("JIT failed to find code space after a cache clear. This should never happen. Please "
+              "report this incident on the bug tracker. Dolphin will now exit.");
+  exit(-1);
 }

-u8* Jit64::DoJit(u32 em_address, JitBlock* b, u32 nextPC)
+bool Jit64::SetEmitterStateToFreeCodeRegion()
+{
+  // Find the largest free memory blocks and set code emitters to point at them.
+  // If we can't find a free block return false instead, which will trigger a JIT cache clear.
+  auto free_near = m_free_ranges_near.by_size_begin();
+  if (free_near == m_free_ranges_near.by_size_end())
+  {
+    WARN_LOG(POWERPC, "Failed to find free memory region in near code region.");
+    return false;
+  }
+  SetCodePtr(free_near.from(), free_near.to());
+
+  auto free_far = m_free_ranges_far.by_size_begin();
+  if (free_far == m_free_ranges_far.by_size_end())
+  {
+    WARN_LOG(POWERPC, "Failed to find free memory region in far code region.");
+    return false;
+  }
+  m_far_code.SetCodePtr(free_far.from(), free_far.to());
+
+  return true;
+}
+
+bool Jit64::DoJit(u32 em_address, JitBlock* b, u32 nextPC)
 {
   js.firstFPInstructionFound = false;
   js.isLastInstruction = false;
@@ -1092,6 +1178,16 @@ u8* Jit64::DoJit(u32 em_address, JitBlock* b, u32 nextPC)
     WriteExit(nextPC);
   }

+  if (HasWriteFailed() || m_far_code.HasWriteFailed())
+  {
+    if (HasWriteFailed())
+      WARN_LOG(POWERPC, "JIT ran out of space in near code region during code generation.");
+    if (m_far_code.HasWriteFailed())
+      WARN_LOG(POWERPC, "JIT ran out of space in far code region during code generation.");
+
+    return false;
+  }
+
   b->codeSize = (u32)(GetCodePtr() - start);
   b->originalSize = code_block.m_num_instructions;

@@ -1099,7 +1195,7 @@ u8* Jit64::DoJit(u32 em_address, JitBlock* b, u32 nextPC)
   LogGeneratedX86(code_block.m_num_instructions, m_code_buffer, start, b);
 #endif

-  return start;
+  return true;
 }

 BitSet8 Jit64::ComputeStaticGQRs(const PPCAnalyst::CodeBlock& cb) const
@@ -18,6 +18,8 @@
 // ----------
 #pragma once

+#include <rangeset/rangesizeset.h>
+
 #include "Common/CommonTypes.h"
 #include "Common/x64ABI.h"
 #include "Common/x64Emitter.h"
@@ -56,7 +58,12 @@ public:
   // Jit!

   void Jit(u32 em_address) override;
-  u8* DoJit(u32 em_address, JitBlock* b, u32 nextPC);
+  void Jit(u32 em_address, bool clear_cache_and_retry_on_failure);
+  bool DoJit(u32 em_address, JitBlock* b, u32 nextPC);
+
+  // Finds a free memory region and sets the near and far code emitters to point at that region.
+  // Returns false if no free memory region can be found for either of the two.
+  bool SetEmitterStateToFreeCodeRegion();

   BitSet32 CallerSavedRegistersInUse() const;
   BitSet8 ComputeStaticGQRs(const PPCAnalyst::CodeBlock&) const;
@@ -243,6 +250,8 @@ private:
   void AllocStack();
   void FreeStack();

+  void ResetFreeMemoryRanges();
+
   JitBlockCache blocks{*this};
   TrampolineCache trampolines{*this};

@@ -254,6 +263,9 @@ private:
   bool m_enable_blr_optimization;
   bool m_cleanup_after_stackfault;
   u8* m_stack;
+
+  HyoutaUtilities::RangeSizeSet<u8*> m_free_ranges_near;
+  HyoutaUtilities::RangeSizeSet<u8*> m_free_ranges_far;
 };

 void LogGeneratedX86(size_t size, const PPCAnalyst::CodeBuffer& code_buffer, const u8* normalEntry,
@@ -53,3 +53,35 @@ void JitBlockCache::WriteDestroyBlock(const JitBlock& block)
   Gen::XEmitter emit2(block.normalEntry, block.normalEntry + 1);
   emit2.INT3();
 }
+
+void JitBlockCache::Init()
+{
+  JitBaseBlockCache::Init();
+  ClearRangesToFree();
+}
+
+void JitBlockCache::DestroyBlock(JitBlock& block)
+{
+  JitBaseBlockCache::DestroyBlock(block);
+
+  if (block.near_begin != block.near_end)
+    m_ranges_to_free_on_next_codegen_near.emplace_back(block.near_begin, block.near_end);
+  if (block.far_begin != block.far_end)
+    m_ranges_to_free_on_next_codegen_far.emplace_back(block.far_begin, block.far_end);
+}
+
+const std::vector<std::pair<u8*, u8*>>& JitBlockCache::GetRangesToFreeNear() const
+{
+  return m_ranges_to_free_on_next_codegen_near;
+}
+
+const std::vector<std::pair<u8*, u8*>>& JitBlockCache::GetRangesToFreeFar() const
+{
+  return m_ranges_to_free_on_next_codegen_far;
+}
+
+void JitBlockCache::ClearRangesToFree()
+{
+  m_ranges_to_free_on_next_codegen_near.clear();
+  m_ranges_to_free_on_next_codegen_far.clear();
+}
@@ -4,6 +4,8 @@

 #pragma once

+#include <vector>
+
 #include "Core/PowerPC/JitCommon/JitCache.h"

 class JitBase;
@@ -13,7 +15,19 @@ class JitBlockCache : public JitBaseBlockCache
 public:
   explicit JitBlockCache(JitBase& jit);

+  void Init() override;
+
+  void DestroyBlock(JitBlock& block) override;
+
+  const std::vector<std::pair<u8*, u8*>>& GetRangesToFreeNear() const;
+  const std::vector<std::pair<u8*, u8*>>& GetRangesToFreeFar() const;
+
+  void ClearRangesToFree();
+
 private:
   void WriteLinkBlock(const JitBlock::LinkData& source, const JitBlock* dest) override;
   void WriteDestroyBlock(const JitBlock& block) override;
+
+  std::vector<std::pair<u8*, u8*>> m_ranges_to_free_on_next_codegen_near;
+  std::vector<std::pair<u8*, u8*>> m_ranges_to_free_on_next_codegen_far;
 };
@@ -22,6 +22,12 @@ class JitBase;
 // so this struct needs to have a standard layout.
 struct JitBlockData
 {
+  // Memory range this code block takes up in near and far code caches.
+  u8* near_begin;
+  u8* near_end;
+  u8* far_begin;
+  u8* far_end;
+
   // A special entry point for block linking; usually used to check the
   // downcount.
   u8* checkedEntry;
@@ -130,7 +136,7 @@
   explicit JitBaseBlockCache(JitBase& jit);
   virtual ~JitBaseBlockCache();

-  void Init();
+  virtual void Init();
   void Shutdown();
   void Clear();
   void Reset();
@@ -159,6 +165,8 @@ public:
   u32* GetBlockBitSet() const;

 protected:
+  virtual void DestroyBlock(JitBlock& block);
+
   JitBase& m_jit;

 private:
@@ -168,7 +176,6 @@ private:
   void LinkBlockExits(JitBlock& block);
   void LinkBlock(JitBlock& block);
   void UnlinkBlock(const JitBlock& block);
-  void DestroyBlock(JitBlock& block);

   JitBlock* MoveBlockIntoFastCache(u32 em_address, u32 msr);
