@@ -1994,6 +1994,10 @@ bool NativeCodeInstruction::SameLinkerObjectVariableRange(const NativeCodeInstru
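// The two added branches appear to sharpen the overlap test: when either side is a
// plain ASMIM_ABSOLUTE access at a different address, the ranges are now reported as
// distinct instead of falling through to the conservative "true".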
 		return mAddress == ins.mAddress;
 	else if (mLinkerObject && mLinkerObject->mStripe > 1)
 		return mAddress / mLinkerObject->mStripe == ins.mAddress / mLinkerObject->mStripe;
+	else if (mMode == ASMIM_ABSOLUTE && mAddress < ins.mAddress)
+		return false;
+	else if (ins.mMode == ASMIM_ABSOLUTE && ins.mAddress < mAddress)
+		return false;
 	else
 		return true;
 }
@@ -2889,21 +2893,45 @@ bool NativeCodeInstruction::BitFieldForwarding(NativeRegisterDataSet& data, AsmI
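// With the index register's known bits tracked in data.mRegs, an indexed read from a
// small unreferenced constant table no longer scans the whole object: ior collects the
// bits known to be one in X/Y and iand the bits that may be one, so only table entries
// reachable through that index pattern feed the OR/AND accumulation behind
// opmask/opvalue. E.g. if X is known to be %000000?1, only entries 1 and 3 are scanned.
// The plain ASMIM_ABSOLUTE case drops out of this branch, since the exact-byte case
// above already covers it.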
 			opmask = data.mRegs[mAddress].mMask;
 			opvalue = data.mRegs[mAddress].mValue;
 		}
 		else if (mMode == ASMIM_ABSOLUTE && mLinkerObject && (mLinkerObject->mFlags & LOBJF_CONST) && mLinkerObject->mReferences.Size() == 0 && mType != ASMIT_JSR)
 		{
 			opmask = 0xff;
 			opvalue = mLinkerObject->mData[mAddress];
 		}
 #if 1
-		else if ((mMode == ASMIM_ABSOLUTE || mMode == ASMIM_ABSOLUTE_X || mMode == ASMIM_ABSOLUTE_Y) && mLinkerObject && (mLinkerObject->mFlags & LOBJF_CONST) && mLinkerObject->mSize <= 256 && mLinkerObject->mReferences.Size() == 0 && mType != ASMIT_JSR)
+		else if ((mMode == ASMIM_ABSOLUTE_X || mMode == ASMIM_ABSOLUTE_Y) && mLinkerObject && (mLinkerObject->mFlags & LOBJF_CONST) && mLinkerObject->mSize <= 256 && mLinkerObject->mReferences.Size() == 0)
 		{
 			int mor = 0;
 			int mand = 0xff;
-			for (int i = 0; i < mLinkerObject->mSize; i++)
+			int ior = 0;
+			int iand = 0xff;
+
+			if (mMode == ASMIM_ABSOLUTE_X)
 			{
-				mor |= mLinkerObject->mData[i];
-				mand &= mLinkerObject->mData[i];
+				ior = data.mRegs[CPU_REG_X].mMask & data.mRegs[CPU_REG_X].mValue;
+				iand = (~data.mRegs[CPU_REG_X].mMask | data.mRegs[CPU_REG_X].mValue) & 0xff;
 			}
+			else if (mMode == ASMIM_ABSOLUTE_Y)
+			{
+				ior = data.mRegs[CPU_REG_Y].mMask & data.mRegs[CPU_REG_Y].mValue;
+				iand = (~data.mRegs[CPU_REG_Y].mMask | data.mRegs[CPU_REG_Y].mValue) & 0xff;
+			}
+
+			for (int i = 0; i < mLinkerObject->mSize - mAddress; i++)
+			{
+				if ((i & ~iand) == 0 && (i & ior) == ior)
+				{
+					mor |= mLinkerObject->mData[mAddress + i];
+					mand &= mLinkerObject->mData[mAddress + i];
+				}
+			}
 			opmask = (mand | ~mor) & 0xff;
 			opvalue = mand;
+
 #if 0
-			if (opmask)
-				printf("Check %s, %02x %02x\n", mLinkerObject->mIdent->mString, opmask, opvalue);
+			if (CheckFunc)
+				printf("Check %s + %d (%02x, %02x), %02x %02x\n", mLinkerObject->mIdent->mString, mAddress, ior, iand, opmask, opvalue);
+
 #endif
 		}
 #endif
@@ -11292,6 +11320,28 @@ NativeCodeBasicBlock* NativeCodeBasicBlock::BinaryOperator(InterCodeProcedure* p
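// The new branches handle ubyte + sbyte additions. The high byte is the sign extension
// of the sbyte operand plus the carry of the low-byte add (presumably produced just
// before this sequence): EOR #$80 flips the sign bit, ROL moves the flipped sign into C
// while pulling the low-byte carry into bit 0, AND #$01 isolates that carry, and
// SBC #$00 subtracts the borrow - leaving A = carry for a non-negative sbyte and
// A = carry - 1 ($FF or $00) for a negative one.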
 					mIns.Push(NativeCodeInstruction(ins, ASMIT_LDA, ASMIM_IMMEDIATE, 0));
 					mIns.Push(NativeCodeInstruction(ins, ASMIT_STA, ASMIM_ZERO_PAGE, treg + 1));
 				}
+#if 1
+				else if (ins->mOperator == IA_ADD && ins->mSrc[0].IsUByte() && ins->mSrc[1].IsSByte() && ins->mDst.mTemp != ins->mSrc[1].mTemp && ins->mSrc[1].mFinal)
+				{
+					// printf("ADD0 %s:%d, %d+%d->%d\n", mProc->mIdent->mString, mIndex, ins->mSrc[0].mTemp, ins->mSrc[1].mTemp, treg);
+					mIns.Push(NativeCodeInstruction(ins, ASMIT_LDA, ASMIM_ZERO_PAGE, BC_REG_TMP + proc->mTempOffset[ins->mSrc[1].mTemp]));
+					mIns.Push(NativeCodeInstruction(ins, ASMIT_EOR, ASMIM_IMMEDIATE, 0x80));
+					mIns.Push(NativeCodeInstruction(ins, ASMIT_ROL, ASMIM_IMPLIED));
+					mIns.Push(NativeCodeInstruction(ins, ASMIT_AND, ASMIM_IMMEDIATE, 0x01));
+					mIns.Push(NativeCodeInstruction(ins, ASMIT_SBC, ASMIM_IMMEDIATE, 0x00));
+					mIns.Push(NativeCodeInstruction(ins, ASMIT_STA, ASMIM_ZERO_PAGE, treg + 1));
+				}
+				else if (ins->mOperator == IA_ADD && ins->mSrc[1].IsUByte() && ins->mSrc[0].IsSByte() && ins->mDst.mTemp != ins->mSrc[0].mTemp && ins->mSrc[0].mFinal)
+				{
+					// printf("ADD1 %s:%d, %d+%d->%d\n", mProc->mIdent->mString, mIndex, ins->mSrc[0].mTemp, ins->mSrc[1].mTemp, treg);
+					mIns.Push(NativeCodeInstruction(ins, ASMIT_LDA, ASMIM_ZERO_PAGE, BC_REG_TMP + proc->mTempOffset[ins->mSrc[0].mTemp]));
+					mIns.Push(NativeCodeInstruction(ins, ASMIT_EOR, ASMIM_IMMEDIATE, 0x80));
+					mIns.Push(NativeCodeInstruction(ins, ASMIT_ROL, ASMIM_IMPLIED));
+					mIns.Push(NativeCodeInstruction(ins, ASMIT_AND, ASMIM_IMMEDIATE, 0x01));
+					mIns.Push(NativeCodeInstruction(ins, ASMIT_SBC, ASMIM_IMMEDIATE, 0x00));
+					mIns.Push(NativeCodeInstruction(ins, ASMIT_STA, ASMIM_ZERO_PAGE, treg + 1));
+				}
+#endif
 				else
 #endif
 				{
@@ -11306,6 +11356,7 @@ NativeCodeBasicBlock* NativeCodeBasicBlock::BinaryOperator(InterCodeProcedure* p
 					mIns.Push(NativeCodeInstruction(ins, ASMIT_STA, ASMIM_ZERO_PAGE, treg + 1));
 				}
 			}
+
 		}
 	}
 }
@@ -26580,6 +26631,40 @@ bool NativeCodeBasicBlock::JoinTailCodeSequences(NativeCodeProcedure* proc, bool
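// If this block's single predecessor ends by storing A, Y or X into the zero-page cell
// the block immediately reloads, the value can stay in the register across the edge:
// the LDA is removed outright for a tail STA, or turned into TYA/TXA for a tail
// STY/STX, with liveness updated accordingly.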
 #endif
 		CheckLive();
 
+		if (mEntryBlocks.Size() == 1 && mIns.Size() >= 1 && mIns[0].mType == ASMIT_LDA && mIns[0].mMode == ASMIM_ZERO_PAGE && !(mIns[0].mLive & LIVE_CPU_REG_Z))
+		{
+			NativeCodeBasicBlock* eb = mEntryBlocks[0];
+			int index, addr;
+			if (eb->HasTailSTA(addr, index) && addr == mIns[0].mAddress)
+			{
+				for (int i = index; i < eb->mIns.Size(); i++)
+					eb->mIns[i].mLive |= LIVE_CPU_REG_A;
+				eb->mExitRequiredRegs += CPU_REG_A;
+				mEntryRequiredRegs += CPU_REG_A;
+				mIns.Remove(0);
+				changed = true;
+			}
+			else if (eb->HasTailSTY(addr, index) && addr == mIns[0].mAddress)
+			{
+				for (int i = index; i < eb->mIns.Size(); i++)
+					eb->mIns[i].mLive |= LIVE_CPU_REG_Y;
+				eb->mExitRequiredRegs += CPU_REG_Y;
+				mEntryRequiredRegs += CPU_REG_Y;
+				mIns[0].mType = ASMIT_TYA;
+				mIns[0].mMode = ASMIM_IMPLIED;
+				changed = true;
+			}
+			else if (eb->HasTailSTX(addr, index) && addr == mIns[0].mAddress)
+			{
+				for (int i = index; i < eb->mIns.Size(); i++)
+					eb->mIns[i].mLive |= LIVE_CPU_REG_X;
+				eb->mExitRequiredRegs += CPU_REG_X;
+				mEntryRequiredRegs += CPU_REG_X;
+				mIns[0].mType = ASMIT_TXA;
+				mIns[0].mMode = ASMIM_IMPLIED;
+				changed = true;
+			}
+		}
 #if 1
 		if (mIns.Size() >= 1 && mIns[0].mType == ASMIT_TAX && !(mIns[0].mLive & (LIVE_CPU_REG_A | LIVE_CPU_REG_Z)) && !mEntryRegA)
 		{
@@ -28918,6 +29003,22 @@ bool NativeCodeBasicBlock::FindImmediateStore(int at, int reg, const NativeCodeI
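// New helper: walks backwards from 'at' (continuing into a single predecessor when the
// block is exhausted) to the last instruction that may change the given absolute
// address; if that is a store of a LDA #imm to the same global, the immediate is
// returned, otherwise -1.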
 	return false;
 }
 
+int NativeCodeBasicBlock::FindImmediateGlobalStore(int at, const NativeCodeInstruction& ins)
+{
+	at--;
+	while (at >= 0 && !ins.MayBeChangedOnAddress(mIns[at]))
+		at--;
+	if (at > 0 && mIns[at].mMode == ASMIM_ABSOLUTE && mIns[at].mLinkerObject == ins.mLinkerObject && mIns[at].mAddress == ins.mAddress &&
+		mIns[at - 1].mType == ASMIT_LDA && mIns[at - 1].mMode == ASMIM_IMMEDIATE)
+	{
+		return mIns[at - 1].mAddress;
+	}
+	if (at < 0 && mEntryBlocks.Size() == 1)
+		return mEntryBlocks[0]->FindImmediateGlobalStore(mEntryBlocks[0]->mIns.Size(), ins);
+
+	return -1;
+}
+
 bool NativeCodeBasicBlock::CheckPatchFailUse(void)
 {
 	if (mPatchStart)
@@ -31167,6 +31268,165 @@ bool NativeCodeBasicBlock::PatchBoolBitPropagation(const NativeCodeBasicBlock* b
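// New pass pair: the Collect functions gather every load/branch consumer (cblocks) and
// every load-immediate/store producer (lins) of a zero-page boolean, failing if it is
// used any other way. (Note the i > 0 guard in the backward collector binds only to
// the STA alternative of its condition.) PatchBitBoolConstOrigin then normalizes the
// stored constants so non-zero becomes $FF and rewrites each consumer's load into
// BIT zp with BMI/BPL, testing the bool through the N flag without clobbering A/X/Y.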
 	return changed;
 }
 
+bool NativeCodeBasicBlock::CollectRegBoolInstructionsForward(int reg, ExpandingArray<NativeCodeBasicBlock*>& cblocks, ExpandingArray<NativeCodeInstruction*>& lins)
+{
+	if (!mPatchStart)
+	{
+		mPatchStart = true;
+		if (!mEntryRequiredRegs[reg])
+			return true;
+
+		int i = 0;
+		while (i < mIns.Size() && !mIns[i].ReferencesZeroPage(reg))
+			i++;
+		if (i < mIns.Size())
+		{
+			if (mIns[i].UsesZeroPage(reg))
+			{
+				if (i == mIns.Size() - 1 &&
+					(mBranch == ASMIT_BEQ || mBranch == ASMIT_BNE) &&
+					(mIns[i].mType == ASMIT_LDA && !(mIns[i].mLive & LIVE_CPU_REG_A) ||
+					 mIns[i].mType == ASMIT_LDX && !(mIns[i].mLive & LIVE_CPU_REG_X) ||
+					 mIns[i].mType == ASMIT_LDY && !(mIns[i].mLive & LIVE_CPU_REG_Y)))
+				{
+					cblocks.Push(this);
+				}
+				else
+					return false;
+			}
+			else if (mIns[i].ChangesZeroPage(reg))
+				return true;
+		}
+
+		for (int i = 0; i < mEntryBlocks.Size(); i++)
+		{
+			if (!mEntryBlocks[i]->CollectRegBoolInstructionsBackward(reg, cblocks, lins))
+				return false;
+		}
+
+		if (mTrueJump && !mTrueJump->CollectRegBoolInstructionsForward(reg, cblocks, lins))
+			return false;
+		if (mFalseJump && !mFalseJump->CollectRegBoolInstructionsForward(reg, cblocks, lins))
+			return false;
+	}
+
+	return true;
+}
+
+bool NativeCodeBasicBlock::CollectRegBoolInstructionsBackward(int reg, ExpandingArray<NativeCodeBasicBlock*>& cblocks, ExpandingArray<NativeCodeInstruction*>& lins)
+{
+	if (!mPatchExit)
+	{
+		mPatchExit = true;
+
+		int sz = mIns.Size();
+		if (mFalseJump && (mBranch == ASMIT_BEQ || mBranch == ASMIT_BNE) && sz > 0 && mIns[sz - 1].mMode == ASMIM_ZERO_PAGE && !(mIns[sz - 1].mLive & LIVE_MEM) && mIns[sz - 1].mAddress == reg)
+		{
+			if (mIns[sz - 1].mType == ASMIT_LDA && !(mIns[sz - 1].mLive & LIVE_CPU_REG_A) ||
+				mIns[sz - 1].mType == ASMIT_LDX && !(mIns[sz - 1].mLive & LIVE_CPU_REG_X) ||
+				mIns[sz - 1].mType == ASMIT_LDY && !(mIns[sz - 1].mLive & LIVE_CPU_REG_Y))
+			{
+				cblocks.Push(this);
+				sz -= 1;
+			}
+		}
+
+		int i = sz - 1;
+		while (i >= 0 && !mIns[i].ReferencesZeroPage(reg))
+			i--;
+
+		bool found = false;
+		if (i >= 0)
+		{
+			if (i > 0 &&
+				mIns[i].mType == ASMIT_STA && mIns[i - 1].mType == ASMIT_LDA && mIns[i - 1].mMode == ASMIM_IMMEDIATE && !(mIns[i].mLive & LIVE_CPU_REG_A) ||
+				mIns[i].mType == ASMIT_STX && mIns[i - 1].mType == ASMIT_LDX && mIns[i - 1].mMode == ASMIM_IMMEDIATE && !(mIns[i].mLive & LIVE_CPU_REG_X) ||
+				mIns[i].mType == ASMIT_STY && mIns[i - 1].mType == ASMIT_LDY && mIns[i - 1].mMode == ASMIM_IMMEDIATE && !(mIns[i].mLive & LIVE_CPU_REG_Y))
+			{
+				lins.Push(&(mIns[i - 1]));
+				found = true;
+			}
+			else
+				return false;
+		}
+
+		if (!found)
+		{
+			if (!mEntryBlocks.Size())
+				return false;
+
+			for (int i = 0; i < mEntryBlocks.Size(); i++)
+			{
+				if (!mEntryBlocks[i]->CollectRegBoolInstructionsBackward(reg, cblocks, lins))
+					return false;
+			}
+		}
+
+		if (mTrueJump && !mTrueJump->CollectRegBoolInstructionsForward(reg, cblocks, lins))
+			return false;
+		if (mFalseJump && !mFalseJump->CollectRegBoolInstructionsForward(reg, cblocks, lins))
+			return false;
+	}
+
+	return true;
+}
+
+bool NativeCodeBasicBlock::PatchBitBoolConstOrigin(void)
+{
+	bool changed = false;
+
+	if (!mVisited)
+	{
+		mVisited = true;
+
+		int sz = mIns.Size();
+		if (mFalseJump && (mBranch == ASMIT_BEQ || mBranch == ASMIT_BNE) && sz > 0 && mIns[sz - 1].mMode == ASMIM_ZERO_PAGE && !(mIns[sz - 1].mLive & LIVE_MEM))
+		{
+			if (mIns[sz - 1].mType == ASMIT_LDA && !(mIns[sz - 1].mLive & LIVE_CPU_REG_A) ||
+				mIns[sz - 1].mType == ASMIT_LDX && !(mIns[sz - 1].mLive & LIVE_CPU_REG_X) ||
+				mIns[sz - 1].mType == ASMIT_LDY && !(mIns[sz - 1].mLive & LIVE_CPU_REG_Y))
+			{
+				ExpandingArray<NativeCodeBasicBlock*> lblocks;
+				ExpandingArray<NativeCodeInstruction*> lins;
+
+				mProc->ResetPatched();
+
+				if (CollectRegBoolInstructionsBackward(mIns[sz - 1].mAddress, lblocks, lins))
+				{
+					for (int i = 0; i < lins.Size(); i++)
+					{
+						if (lins[i]->mAddress != 0)
+							lins[i]->mAddress = 0xff;
+					}
+					for (int i = 0; i < lblocks.Size(); i++)
+					{
+						if (!lblocks[i]->mPatched)
+						{
+							lblocks[i]->mPatched = true;
+							int sz = lblocks[i]->mIns.Size();
+							lblocks[i]->mIns[sz - 1].mType = ASMIT_BIT;
+							if (lblocks[i]->mBranch == ASMIT_BNE)
+								lblocks[i]->mBranch = ASMIT_BMI;
+							else
+								lblocks[i]->mBranch = ASMIT_BPL;
+						}
+					}
+					changed = true;
+				}
+			}
+		}
+
+		if (mTrueJump && mTrueJump->PatchBitBoolConstOrigin())
+			changed = true;
+		if (mFalseJump && mFalseJump->PatchBitBoolConstOrigin())
+			changed = true;
+	}
+
+	return changed;
+}
+
 bool NativeCodeBasicBlock::CheckGlobalAddressSumYPointer(const NativeCodeBasicBlock * block, int reg, int index, int at, int yval)
 {
 	if (!mPatched)
@@ -34887,7 +35147,21 @@ bool NativeCodeBasicBlock::PartialBackwardValuePropagation(void)
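// Uses the new FindImmediateGlobalStore: an absolute LDA/LDX/LDY whose value can be
// traced back to a constant store of the same global is folded into an immediate load.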
 			i++;
 		}
 	}
 
+#if 1
+	for (int i = 0; i < mIns.Size(); i++)
+	{
+		if ((mIns[i].mType == ASMIT_LDA || mIns[i].mType == ASMIT_LDX || mIns[i].mType == ASMIT_LDY) && mIns[i].mMode == ASMIM_ABSOLUTE)
+		{
+			int val = FindImmediateGlobalStore(i, mIns[i]);
+			if (val >= 0)
+			{
+				mIns[i].mMode = ASMIM_IMMEDIATE;
+				mIns[i].mAddress = val;
+				changed = true;
+			}
+		}
+	}
+#endif
 	if (mTrueJump && mTrueJump->PartialBackwardValuePropagation())
 		changed = true;
 	if (mFalseJump && mFalseJump->PartialBackwardValuePropagation())
@@ -39990,6 +40264,31 @@ bool NativeCodeBasicBlock::OptimizeSimpleLoopInvariant(NativeCodeProcedure* proc
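// A loop block that begins with LDX abs / INC abs on the same cell and never touches X
// or that cell afterwards can keep the counter in X: the LDX is hoisted into the
// preceding block, the INC becomes INX, and a single STX in the exit block writes the
// final value back.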
 		}
 	}
 
+	if (sz >= 2 &&
+		mIns[0].mType == ASMIT_LDX && mIns[0].mMode == ASMIM_ABSOLUTE &&
+		mIns[1].mType == ASMIT_INC && mIns[0].SameEffectiveAddress(mIns[1]))
+	{
+		int i = 2;
+		while (i < mIns.Size() && !mIns[i].ChangesXReg() && !mIns[0].MayBeChangedOnAddress(mIns[i]))
+			i++;
+		if (i == mIns.Size())
+		{
+			if (!prevBlock)
+				return OptimizeSimpleLoopInvariant(proc, full);
+			for (int i = 0; i < mIns.Size(); i++)
+				mIns[i].mLive |= LIVE_CPU_REG_X;
+			mIns[1].mType = ASMIT_INX; mIns[1].mMode = ASMIM_IMPLIED;
+			prevBlock->mIns.Push(mIns[0]);
+			exitBlock->mIns.Insert(0, NativeCodeInstruction(mIns[0].mIns, ASMIT_STX, mIns[0]));
+			mIns.Remove(0);
+			prevBlock->mExitRequiredRegs += CPU_REG_X;
+			mEntryRequiredRegs += CPU_REG_X;
+			mExitRequiredRegs += CPU_REG_X;
+			exitBlock->mEntryRequiredRegs += CPU_REG_X;
+			changed = true;
+		}
+	}
+
 	if (sz >= 2 && mIns[0].mType == ASMIT_LDX && mIns[0].mMode == ASMIM_ZERO_PAGE)
 	{
 		int i = mIns.Size() - 1;
@@ -45206,6 +45505,43 @@ void NativeCodeBasicBlock::BlockSizeReduction(NativeCodeProcedure* proc, int xen
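// Small-diamond cleanup: when one side of a two-way split is a single INC and the join
// block contains a CLC before any other carry use, a BCC/BCS split means the carry is
// already clear on the direct edge - so the CLC is moved into the one-instruction side
// and dropped from the join.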
 	}
 #endif
 
+	// Check for small condition block
+	if (mFalseJump)
+	{
+		NativeCodeBasicBlock* cblock = nullptr, * eblock = nullptr;
+
+		if (mTrueJump == mFalseJump->mTrueJump && !mFalseJump->mFalseJump && mTrueJump->mNumEntries == 2 && mFalseJump->mNumEntries == 1)
+		{
+			cblock = mFalseJump;
+			eblock = mTrueJump;
+		}
+		else if (mFalseJump == mTrueJump->mTrueJump && !mTrueJump->mFalseJump && mFalseJump->mNumEntries == 2 && mTrueJump->mNumEntries == 1)
+		{
+			cblock = mTrueJump;
+			eblock = mFalseJump;
+		}
+
+		if (eblock && cblock->mIns.Size() == 1 && cblock->mIns[0].mType == ASMIT_INC)
+		{
+			int i = 0;
+			while (i < eblock->mIns.Size() && !eblock->mIns[i].ReferencesCarry())
+				i++;
+			if (i < eblock->mIns.Size() && eblock->mIns[i].mType == ASMIT_CLC)
+			{
+				if (mBranch == ASMIT_BCC && eblock == mTrueJump || mBranch == ASMIT_BCS && eblock == mFalseJump)
+				{
+					cblock->mIns.Push(NativeCodeInstruction(eblock->mIns[i].mIns, ASMIT_CLC));
+					eblock->mIns.Remove(i);
+					mExitRequiredRegs += CPU_REG_C;
+					cblock->mExitRequiredRegs += CPU_REG_C;
+					eblock->mEntryRequiredRegs += CPU_REG_C;
+					for (int j = 0; j < i; j++)
+						eblock->mIns[j].mLive |= LIVE_CPU_REG_C;
+				}
+			}
+		}
+	}
+
 	bool carrySet = false, carryClear = false;
 	if (mEntryBlocks.Size() == 1 && center >= 0)
 	{
@@ -45252,7 +45588,7 @@ void NativeCodeBasicBlock::BlockSizeReduction(NativeCodeProcedure* proc, int xen
 
 	i = 0;
 	j = 0;
-	int accuVal = 0, accuMask = 0;
+	int accuVal = 0, accuMask = 0, xregVal = 0, xregMask = 0, yregVal = 0, yregMask = 0;
 	bool accuFlags = false;
 	while (i < mIns.Size())
 	{
@@ -45398,12 +45734,74 @@ void NativeCodeBasicBlock::BlockSizeReduction(NativeCodeProcedure* proc, int xen
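// Mirror of the BitFieldForwarding change inside block size reduction: with X/Y
// immediates now tracked (xregVal/xregMask, yregVal/yregMask), an indexed LDA from an
// unreferenced constant table narrows the accumulator's known bits to the OR/AND over
// the reachable entries. The #if 0 printf is disabled debug output carried over from
// BitFieldForwarding and still refers to that function's locals.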
 					accuFlags = true;
 				}
 				break;
+			case ASMIT_LDX:
+				if (mIns[i].mMode == ASMIM_IMMEDIATE)
+				{
+					xregVal = mIns[i].mAddress;
+					xregMask = 0xff;
+				}
+				else
+					xregMask = 0;
+				accuFlags = false;
+				break;
+			case ASMIT_LDY:
+				if (mIns[i].mMode == ASMIM_IMMEDIATE)
+				{
+					yregVal = mIns[i].mAddress;
+					yregMask = 0xff;
+				}
+				else
+					yregMask = 0;
+				accuFlags = false;
+				break;
+
 			case ASMIT_LDA:
 				if (mIns[i].mMode == ASMIM_IMMEDIATE)
 				{
 					accuVal = mIns[i].mAddress;
 					accuMask = 0xff;
 				}
+				else if (mIns[i].mMode == ASMIM_ABSOLUTE && mIns[i].mLinkerObject && (mIns[i].mLinkerObject->mFlags & LOBJF_CONST) && mIns[i].mLinkerObject->mReferences.Size() == 0)
+				{
+					accuVal = mIns[i].mLinkerObject->mData[mIns[i].mAddress];
+					accuMask = 0xff;
+				}
+#if 1
+				else if ((mIns[i].mMode == ASMIM_ABSOLUTE_X || mIns[i].mMode == ASMIM_ABSOLUTE_Y) && mIns[i].mLinkerObject && (mIns[i].mLinkerObject->mFlags & LOBJF_CONST) && mIns[i].mLinkerObject->mSize <= 256 && mIns[i].mLinkerObject->mReferences.Size() == 0)
+				{
+					int mor = 0;
+					int mand = 0xff;
+					int ior = 0;
+					int iand = 0xff;
+
+					if (mIns[i].mMode == ASMIM_ABSOLUTE_X)
+					{
+						ior = xregMask & xregVal;
+						iand = (~xregMask | xregVal) & 0xff;
+					}
+					else if (mIns[i].mMode == ASMIM_ABSOLUTE_Y)
+					{
+						ior = yregMask & yregVal;
+						iand = (~yregMask | yregVal) & 0xff;
+					}
+
+					for (int j = 0; j < mIns[i].mLinkerObject->mSize - mIns[i].mAddress; j++)
+					{
+						if ((j & ~iand) == 0 && (j & ior) == ior)
+						{
+							mor |= mIns[i].mLinkerObject->mData[mIns[i].mAddress + j];
+							mand &= mIns[i].mLinkerObject->mData[mIns[i].mAddress + j];
+						}
+					}
+
+					accuMask = (mand | ~mor) & 0xff;
+					accuVal = mand;
+#if 0
+					if (CheckFunc)
+						printf("Check %s + %d (%02x, %02x), %02x %02x\n", mLinkerObject->mIdent->mString, mAddress, ior, iand, opmask, opvalue);
+#endif
+				}
+#endif
 				else
 				{
 					accuVal = 0;
@@ -45546,7 +45944,23 @@ void NativeCodeBasicBlock::BlockSizeReduction(NativeCodeProcedure* proc, int xen
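// TAX/TAY now copy the tracked accumulator value into the X/Y tracking state, and the
// new TXA/TYA cases copy it back; all four mark the flags as reflecting the accumulator.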
 				accuFlags = mIns[i].mMode == ASMIM_IMPLIED;
 				break;
 			case ASMIT_TAX:
+				xregMask = accuMask;
+				xregVal = accuVal;
+				accuFlags = true;
+				break;
 			case ASMIT_TAY:
+				yregMask = accuMask;
+				yregVal = accuVal;
 				accuFlags = true;
 				break;
+			case ASMIT_TXA:
+				accuMask = xregMask;
+				accuVal = xregVal;
+				accuFlags = true;
+				break;
+			case ASMIT_TYA:
+				accuMask = yregMask;
+				accuVal = yregVal;
+				accuFlags = true;
+				break;
 			default:
@@ -45555,6 +45969,10 @@ void NativeCodeBasicBlock::BlockSizeReduction(NativeCodeProcedure* proc, int xen
 				carryClear = false;
 				carrySet = false;
 			}
+			if (mIns[i].ChangesXReg())
+				xregMask = 0;
+			if (mIns[i].ChangesYReg())
+				yregMask = 0;
 
 			if (mIns[i].ChangesAccu())
 			{
@@ -54280,7 +54698,27 @@ bool NativeCodeBasicBlock::PeepHoleOptimizerExits(int pass)
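// Exit peephole for the sign-extension idiom above: when EOR #$80 / ROL / AND #$01 /
// SBC #$00 only feeds a BEQ/BNE and A, Z and C are dead afterwards, dropping the EOR
// exactly inverts which sign/carry combinations produce zero, so removing it and
// flipping the branch is equivalent.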
 				changed = true;
 			}
 		}
 
+#if 1
+		else if (sz >= 4 &&
+			mIns[sz - 4].mType == ASMIT_EOR && mIns[sz - 4].mMode == ASMIM_IMMEDIATE && mIns[sz - 4].mAddress == 0x80 &&
+			mIns[sz - 3].mType == ASMIT_ROL && mIns[sz - 3].mMode == ASMIM_IMPLIED &&
+			mIns[sz - 2].mType == ASMIT_AND && mIns[sz - 2].mMode == ASMIM_IMMEDIATE && mIns[sz - 2].mAddress == 0x01 &&
+			mIns[sz - 1].mType == ASMIT_SBC && mIns[sz - 1].mMode == ASMIM_IMMEDIATE && mIns[sz - 1].mAddress == 0x00 && !(mIns[sz - 1].mLive & LIVE_CPU_REG_A) && !mExitRequiredRegs[CPU_REG_Z] && !mExitRequiredRegs[CPU_REG_C])
+		{
+			if (mBranch == ASMIT_BEQ)
+			{
+				mIns.Remove(sz - 4);
+				mBranch = ASMIT_BNE;
+				changed = true;
+			}
+			else if (mBranch == ASMIT_BNE)
+			{
+				mIns.Remove(sz - 4);
+				mBranch = ASMIT_BEQ;
+				changed = true;
+			}
+		}
+#endif
 		else if (sz >= 2 &&
 			mIns[sz - 2].mType == ASMIT_LDA && mIns[sz - 2].mMode == ASMIM_IMMEDIATE && mIns[sz - 2].mAddress == 0 &&
 			mIns[sz - 1].mType == ASMIT_SBC && mIns[sz - 1].mMode == ASMIM_IMMEDIATE && mIns[sz - 1].mAddress == 0 && !(mIns[sz - 1].mLive & (LIVE_CPU_REG_A | LIVE_CPU_REG_C)) && !mExitRequiredRegs[CPU_REG_Z])
@@ -55930,7 +56368,7 @@ void NativeCodeProcedure::Compile(InterCodeProcedure* proc)
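// Debug aid: CheckFunc now tags the function named "deco" instead of "main".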
 	mInterProc->mLinkerObject->mNativeProc = this;
 
-	CheckFunc = !strcmp(mIdent->mString, "main");
+	CheckFunc = !strcmp(mIdent->mString, "deco");
 
 	int nblocks = proc->mBlocks.Size();
 	tblocks = new NativeCodeBasicBlock * [nblocks];
@@ -57831,6 +58269,13 @@ void NativeCodeProcedure::Optimize(void)
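// Runs the new PatchBitBoolConstOrigin pass as optimizer step 23; the final hunk
// raises the corresponding step bound from 23 to 24 so the extra step is reached.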
 			changed = true;
 		}
 
+		if (step == 23)
+		{
+			ResetVisited();
+			if (mEntryBlock->PatchBitBoolConstOrigin())
+				changed = true;
+		}
+
 #if _DEBUG
 		ResetVisited();
 		mEntryBlock->CheckAsmCode();
@@ -57856,7 +58301,7 @@ void NativeCodeProcedure::Optimize(void)
 		}
 
 #if 1
-		if (!changed && step < 23)
+		if (!changed && step < 24)
 		{
 			ResetIndexFlipped();
 