Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

JIT: Reimport full spill clique for I_IMPL<->BYREF mismatches #92307

Merged
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 0 additions & 1 deletion src/coreclr/jit/compiler.h
Original file line number Diff line number Diff line change
Expand Up @@ -4239,7 +4239,6 @@ class Compiler
void impImportBlockCode(BasicBlock* block);

void impReimportMarkBlock(BasicBlock* block);
void impReimportMarkSuccessors(BasicBlock* block);

void impVerifyEHBlock(BasicBlock* block, bool isTryStart);

Expand Down
71 changes: 27 additions & 44 deletions src/coreclr/jit/importer.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -11043,20 +11043,6 @@ inline void Compiler::impReimportMarkBlock(BasicBlock* block)
block->bbFlags &= ~BBF_IMPORTED;
}

/*****************************************************************************
* Mark the successors of the given block as unimported.
* Note that the caller is responsible for calling impImportBlockPending()
* for all the successors, with the appropriate stack-state.
*/

void Compiler::impReimportMarkSuccessors(BasicBlock* block)
{
    // Walk each successor of "block" and mark it as not-yet-imported
    // (impReimportMarkBlock clears BBF_IMPORTED), so the importer will
    // process those blocks again. Per the header comment, the caller must
    // still queue each successor via impImportBlockPending() with the
    // appropriate stack state.
    for (BasicBlock* const succBlock : block->Succs())
    {
        impReimportMarkBlock(succBlock);
    }
}

/*****************************************************************************
*
* Filter wrapper to handle only passed in exception code
Expand Down Expand Up @@ -11379,38 +11365,35 @@ void Compiler::impImportBlock(BasicBlock* block)
baseTmp = impGetSpillTmpBase(block);
}

/* Spill all stack entries into temps */
unsigned level, tempNum;
// Spill all stack entries into temps

JITDUMP("\nSpilling stack entries into temps\n");
for (level = 0, tempNum = baseTmp; level < verCurrentState.esStackDepth; level++, tempNum++)
for (unsigned level = 0, tempNum = baseTmp; level < verCurrentState.esStackDepth; level++, tempNum++)
{
GenTree* tree = verCurrentState.esStack[level].val;

/* VC generates code where it pushes a byref from one branch, and an int (ldc.i4 0) from
the other. This should merge to a byref in unverifiable code.
However, if the branch which leaves the TYP_I_IMPL on the stack is imported first, the
successor would be imported assuming there was a TYP_I_IMPL on
the stack. Thus the value would not get GC-tracked. Hence,
change the temp to TYP_BYREF and reimport the successors.
Note: We should only allow this in unverifiable code.
*/
if (tree->gtType == TYP_BYREF && lvaTable[tempNum].lvType == TYP_I_IMPL)
// VC generates code where it pushes a byref from one branch, and an int (ldc.i4 0) from
// the other. This should merge to a byref in unverifiable code.
// However, if the branch which leaves the TYP_I_IMPL on the stack is imported first, the
// successor would be imported assuming there was a TYP_I_IMPL on
// the stack. Thus the value would not get GC-tracked. Hence,
// change the temp to TYP_BYREF and reimport the clique.
LclVarDsc* tempDsc = lvaGetDesc(tempNum);
if (tree->TypeIs(TYP_BYREF) && (tempDsc->TypeGet() == TYP_I_IMPL))
{
lvaTable[tempNum].lvType = TYP_BYREF;
impReimportMarkSuccessors(block);
markImport = true;
jakobbotsch marked this conversation as resolved.
Show resolved Hide resolved
tempDsc->lvType = TYP_BYREF;
reimportSpillClique = true;
}

#ifdef TARGET_64BIT
if (genActualType(tree->gtType) == TYP_I_IMPL && lvaTable[tempNum].lvType == TYP_INT)
if ((genActualType(tree) == TYP_I_IMPL) && (tempDsc->TypeGet() == TYP_INT))
{
// Some other block in the spill clique set this to "int", but now we have "native int".
// Change the type and go back to re-import any blocks that used the wrong type.
lvaTable[tempNum].lvType = TYP_I_IMPL;
reimportSpillClique = true;
tempDsc->lvType = TYP_I_IMPL;
reimportSpillClique = true;
}
else if (genActualType(tree->gtType) == TYP_INT && lvaTable[tempNum].lvType == TYP_I_IMPL)
else if ((genActualType(tree) == TYP_INT) && (tempDsc->TypeGet() == TYP_I_IMPL))
{
// Spill clique has decided this should be "native int", but this block only pushes an "int".
// Insert a sign-extension to "native int" so we match the clique.
Expand All @@ -11425,14 +11408,14 @@ void Compiler::impImportBlock(BasicBlock* block)
// imported already, we need to change the type of the local and reimport the spill clique.
// If the 'byref' side has imported, we insert a cast from int to 'native int' to match
// the 'byref' size.
if (genActualType(tree->gtType) == TYP_BYREF && lvaTable[tempNum].lvType == TYP_INT)
if ((genActualType(tree) == TYP_BYREF) && (tempDsc->TypeGet() == TYP_INT))
{
// Some other block in the spill clique set this to "int", but now we have "byref".
// Change the type and go back to re-import any blocks that used the wrong type.
lvaTable[tempNum].lvType = TYP_BYREF;
reimportSpillClique = true;
tempDsc->lvType = TYP_BYREF;
reimportSpillClique = true;
}
else if (genActualType(tree->gtType) == TYP_INT && lvaTable[tempNum].lvType == TYP_BYREF)
else if ((genActualType(tree) == TYP_INT) && (tempDsc->TypeGet() == TYP_BYREF))
{
// Spill clique has decided this should be "byref", but this block only pushes an "int".
// Insert a sign-extension to "native int" so we match the clique size.
Expand All @@ -11441,14 +11424,14 @@ void Compiler::impImportBlock(BasicBlock* block)

#endif // TARGET_64BIT

if (tree->gtType == TYP_DOUBLE && lvaTable[tempNum].lvType == TYP_FLOAT)
if (tree->TypeIs(TYP_DOUBLE) && (tempDsc->lvType == TYP_FLOAT))
{
// Some other block in the spill clique set this to "float", but now we have "double".
// Change the type and go back to re-import any blocks that used the wrong type.
lvaTable[tempNum].lvType = TYP_DOUBLE;
reimportSpillClique = true;
tempDsc->lvType = TYP_DOUBLE;
reimportSpillClique = true;
}
else if (tree->gtType == TYP_FLOAT && lvaTable[tempNum].lvType == TYP_DOUBLE)
else if (tree->TypeIs(TYP_FLOAT) && (tempDsc->TypeGet() == TYP_DOUBLE))
{
// Spill clique has decided this should be "double", but this block only pushes a "float".
// Insert a cast to "double" so we match the clique.
Expand All @@ -11459,11 +11442,11 @@ void Compiler::impImportBlock(BasicBlock* block)
are spilling to the temps already used by a previous block),
we need to spill addStmt */

if (addStmt != nullptr && !newTemps && gtHasRef(addStmt->GetRootNode(), tempNum))
if ((addStmt != nullptr) && !newTemps && gtHasRef(addStmt->GetRootNode(), tempNum))
{
GenTree* addTree = addStmt->GetRootNode();

if (addTree->gtOper == GT_JTRUE)
if (addTree->OperIs(GT_JTRUE))
{
GenTree* relOp = addTree->AsOp()->gtOp1;
assert(relOp->OperIsCompare());
Expand All @@ -11488,7 +11471,7 @@ void Compiler::impImportBlock(BasicBlock* block)
}
else
{
assert(addTree->gtOper == GT_SWITCH && genActualTypeIsIntOrI(addTree->AsOp()->gtOp1->TypeGet()));
assert(addTree->OperIs(GT_SWITCH) && genActualTypeIsIntOrI(addTree->AsOp()->gtOp1->TypeGet()));

unsigned temp = lvaGrabTemp(true DEBUGARG("spill addStmt SWITCH"));
impStoreTemp(temp, addTree->AsOp()->gtOp1, level);
Expand Down
1 change: 0 additions & 1 deletion src/coreclr/jit/lclvars.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -4146,7 +4146,6 @@ void Compiler::lvaMarkLclRefs(GenTree* tree, BasicBlock* block, Statement* stmt,

// Check that the LCL_VAR node has the same type as the underlying variable, save a few mismatches we allow.
assert(tree->TypeIs(varDsc->TypeGet(), genActualType(varDsc)) ||
(tree->TypeIs(TYP_I_IMPL) && (varDsc->TypeGet() == TYP_BYREF)) || // Created for spill clique import.
(tree->TypeIs(TYP_BYREF) && (varDsc->TypeGet() == TYP_I_IMPL)) || // Created by inliner substitution.
(tree->TypeIs(TYP_INT) && (varDsc->TypeGet() == TYP_LONG))); // Created by "optNarrowTree".
}
Expand Down
Loading