
[JIT] [Issue: 61620] Optimizing ARM64 for *x = dblCns; #61847

Merged: 10 commits, Nov 30, 2021
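The store pattern in question, sketched in C++ for illustration (a hypothetical StoreConstant standing in for the title's *x = dblCns; the issue's actual repro may differ):

void StoreConstant(double* x)
{
    *x = 0.0; // indirect store of a floating-point constant
}

Before this change only xarch lowered such a store to an integer-constant store; the PR moves the transform into common lowering so ARM and ARM64 can use it too.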
39 changes: 39 additions & 0 deletions src/coreclr/jit/lower.cpp
@@ -6792,6 +6792,45 @@ void Lowering::LowerStoreIndirCommon(GenTreeStoreInd* ind)
TryCreateAddrMode(ind->Addr(), true, ind->TypeGet());
if (!comp->codeGen->gcInfo.gcIsWriteBarrierStoreIndNode(ind))
{
if (varTypeIsFloating(ind) && ind->Data()->IsCnsFltOrDbl())
{
// Optimize *x = DCON to *x = ICON which is slightly faster on xarch
GenTree* data = ind->Data();
double dblCns = data->AsDblCon()->gtDconVal;
ssize_t intCns = 0;
var_types type = TYP_UNKNOWN;

#if defined(TARGET_XARCH) || defined(TARGET_ARM)
bool shouldSwitchToInteger = true;
#else // TARGET_ARM64
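// On ARM64 a non-zero integer constant would still have to be materialized
// (mov/movk), so the switch only pays off for an all-zero bit pattern, which
// can be stored directly from the wzr/xzr zero register.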
bool shouldSwitchToInteger = !data->IsCnsNonZeroFltOrDbl();
#endif

if (shouldSwitchToInteger)
{
if (ind->TypeIs(TYP_FLOAT))
{
float fltCns = static_cast<float>(dblCns); // should be a safe round-trip
intCns = static_cast<ssize_t>(*reinterpret_cast<INT32*>(&fltCns));
type = TYP_INT;
}
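// A double's bit pattern needs a 64-bit integer constant (TYP_LONG), so this
// branch is limited to 64-bit targets.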
#ifdef TARGET_64BIT
else
{
assert(ind->TypeIs(TYP_DOUBLE));
intCns = static_cast<ssize_t>(*reinterpret_cast<INT64*>(&dblCns));
type = TYP_LONG;
}
#endif
}

if (type != TYP_UNKNOWN)
{
data->BashToConst(intCns, type);
ind->ChangeType(type);
}
}

LowerStoreIndir(ind);
}
}
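For reference, a minimal standalone sketch of the bit-reinterpretation the lowering relies on (hypothetical FloatBits/DoubleBits helpers, not JIT code; memcpy sidesteps the strict-aliasing question the reinterpret_cast above raises):

#include <cstdint>
#include <cstdio>
#include <cstring>

// Return the raw bit pattern of a float/double, as handed to BashToConst.
static int32_t FloatBits(float f)
{
    int32_t bits;
    std::memcpy(&bits, &f, sizeof(bits)); // well-defined type punning
    return bits;
}

static int64_t DoubleBits(double d)
{
    int64_t bits;
    std::memcpy(&bits, &d, sizeof(bits));
    return bits;
}

int main()
{
    // *x = 0.0f becomes an integer store of 0x00000000; on ARM64 this can
    // come straight from the zero register.
    std::printf("0.0f -> 0x%08x\n", (unsigned)FloatBits(0.0f));
    // A non-zero constant such as 1.5 still needs materialization, which is
    // why the ARM64 path above folds only zero constants.
    std::printf("1.5  -> 0x%016llx\n", (unsigned long long)DoubleBits(1.5));
    return 0;
}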
32 changes: 1 addition & 31 deletions src/coreclr/jit/lowerxarch.cpp
@@ -130,37 +130,7 @@ void Lowering::LowerStoreIndir(GenTreeStoreInd* node)
return;
}
}
else if (node->Data()->IsCnsFltOrDbl())
{
// Optimize *x = DCON to *x = ICON which is slightly faster on xarch
GenTree* data = node->Data();
double dblCns = data->AsDblCon()->gtDconVal;
ssize_t intCns = 0;
var_types type = TYP_UNKNOWN;

if (node->TypeIs(TYP_FLOAT))
{
float fltCns = static_cast<float>(dblCns); // should be a safe round-trip
intCns = static_cast<ssize_t>(*reinterpret_cast<INT32*>(&fltCns));
type = TYP_INT;
}
#ifdef TARGET_AMD64
else
{
assert(node->TypeIs(TYP_DOUBLE));
intCns = static_cast<ssize_t>(*reinterpret_cast<INT64*>(&dblCns));
type = TYP_LONG;
}
#endif

if (type != TYP_UNKNOWN)
{
data->SetContained();
data->BashToConst(intCns, type);
node->ChangeType(type);
}
}


// Optimization: do not unnecessarily zero-extend the result of setcc.
if (varTypeIsByte(node) && (node->Data()->OperIsCompare() || node->Data()->OperIs(GT_SETCC)))
{