path: root/src/ARMJIT_A64
author    Arisotura <thetotalworm@gmail.com>  2021-11-17 18:41:59 +0100
committer Arisotura <thetotalworm@gmail.com>  2021-11-17 18:41:59 +0100
commit    69715043cad65db6e5adb252746f9ec4c1f61087 (patch)
tree      267ff33fa395445a6b37ab2fda0f4e1fcb191805 /src/ARMJIT_A64
parent    c1dcd585be0d7789bf68f098c443b7cadff28965 (diff)
ARMJIT_A64: replace the remaining Config::JIT_LiteralOptimisations / Config::JIT_FastMemory checks in the load/store compiler with the ARMJIT::LiteralOptimizations and ARMJIT::FastMemory settings
Diffstat (limited to 'src/ARMJIT_A64')
-rw-r--r--  src/ARMJIT_A64/ARMJIT_LoadStore.cpp  12
1 file changed, 6 insertions(+), 6 deletions(-)
diff --git a/src/ARMJIT_A64/ARMJIT_LoadStore.cpp b/src/ARMJIT_A64/ARMJIT_LoadStore.cpp
index d9b7a68..7ce6d24 100644
--- a/src/ARMJIT_A64/ARMJIT_LoadStore.cpp
+++ b/src/ARMJIT_A64/ARMJIT_LoadStore.cpp
@@ -18,7 +18,7 @@
#include "ARMJIT_Compiler.h"
-#include "../Config.h"
+#include "../ARMJIT.h"
#include "../ARMJIT_Memory.h"
@@ -111,7 +111,7 @@ void Compiler::Comp_MemAccess(int rd, int rn, Op2 offset, int size, int flags)
if (size == 16)
addressMask = ~1;
- if (Config::JIT_LiteralOptimisations && rn == 15 && rd != 15 && offset.IsImm && !(flags & (memop_Post|memop_Store|memop_Writeback)))
+ if (ARMJIT::LiteralOptimizations && rn == 15 && rd != 15 && offset.IsImm && !(flags & (memop_Post|memop_Store|memop_Writeback)))
{
u32 addr = R15 + offset.Imm * ((flags & memop_SubtractOffset) ? -1 : 1);
@@ -146,7 +146,7 @@ void Compiler::Comp_MemAccess(int rd, int rn, Op2 offset, int size, int flags)
MOV(W0, rnMapped);
}
- bool addrIsStatic = Config::JIT_LiteralOptimisations
+ bool addrIsStatic = ARMJIT::LiteralOptimizations
&& RegCache.IsLiteral(rn) && offset.IsImm && !(flags & (memop_Writeback|memop_Post));
u32 staticAddress;
if (addrIsStatic)
@@ -188,7 +188,7 @@ void Compiler::Comp_MemAccess(int rd, int rn, Op2 offset, int size, int flags)
? ARMJIT_Memory::ClassifyAddress9(addrIsStatic ? staticAddress : CurInstr.DataRegion)
: ARMJIT_Memory::ClassifyAddress7(addrIsStatic ? staticAddress : CurInstr.DataRegion);
- if (Config::JIT_FastMemory && ((!Thumb && CurInstr.Cond() != 0xE) || ARMJIT_Memory::IsFastmemCompatible(expectedTarget)))
+ if (ARMJIT::FastMemory && ((!Thumb && CurInstr.Cond() != 0xE) || ARMJIT_Memory::IsFastmemCompatible(expectedTarget)))
{
ptrdiff_t memopStart = GetCodeOffset();
LoadStorePatch patch;
@@ -452,7 +452,7 @@ void Compiler::T_Comp_LoadPCRel()
u32 offset = ((CurInstr.Instr & 0xFF) << 2);
u32 addr = (R15 & ~0x2) + offset;
- if (!Config::JIT_LiteralOptimisations || !Comp_MemLoadLiteral(32, false, CurInstr.T_Reg(8), addr))
+ if (!ARMJIT::LiteralOptimizations || !Comp_MemLoadLiteral(32, false, CurInstr.T_Reg(8), addr))
Comp_MemAccess(CurInstr.T_Reg(8), 15, Op2(offset), 32, 0);
}
@@ -497,7 +497,7 @@ s32 Compiler::Comp_MemAccessBlock(int rn, BitSet16 regs, bool store, bool preinc
? ARMJIT_Memory::ClassifyAddress9(CurInstr.DataRegion)
: ARMJIT_Memory::ClassifyAddress7(CurInstr.DataRegion);
- bool compileFastPath = Config::JIT_FastMemory
+ bool compileFastPath = ARMJIT::FastMemory
&& store && !usermode && (CurInstr.Cond() < 0xE || ARMJIT_Memory::IsFastmemCompatible(expectedTarget));
{
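
Note: the include change from "../Config.h" to "../ARMJIT.h" implies that the JIT settings used in this file are now globals in the ARMJIT namespace rather than Config::JIT_* entries. A minimal sketch of what ARMJIT.h is assumed to declare for these call sites (names taken from the identifiers in this diff; the actual header may expose them differently):

    // Sketch only: assumed declarations in src/ARMJIT.h, based on the
    // identifiers used in this diff. The real header may differ.
    namespace ARMJIT
    {
        // Settings previously read as Config::JIT_LiteralOptimisations
        // and Config::JIT_FastMemory, now owned by the JIT itself.
        extern bool LiteralOptimizations;
        extern bool FastMemory;
    }

With these in scope, the call sites above read ARMJIT::LiteralOptimizations instead of Config::JIT_LiteralOptimisations, and ARMJIT::FastMemory instead of Config::JIT_FastMemory, which is what allows the Config.h include to be dropped.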