RLE: better handling of ref_element/tail_addr [immutable]

Rerun RLE, cutting off the base address of loads at `ref_element_addr [immutable]`/`ref_tail_addr [immutable]`. This increases the chance of catching loads of immutable COW class properties or elements.
Erik Eckstein
2021-11-24 16:32:52 +01:00
parent b89f58de6d
commit f97876c9e7
7 changed files with 131 additions and 49 deletions
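
In the diff below, enumerateLSLocation() now reports whether a load's base address was cut off at an immutable `ref_element_addr`/`ref_tail_addr`, and enumerateLSLocations() gains a `stopAtImmutable` flag plus `numLoads`/`numStores`/`immutableLoadsFound` out-parameters. What follows is a minimal, hypothetical driver sketch of the "rerun" idea from the commit message: only the enumerateLSLocations() signature is taken from this commit; the function names, header paths, and reset-and-rerun policy are illustrative assumptions, not the actual RLE pass code.

// A minimal, hypothetical driver sketch (NOT the pass's actual code): it only
// shows how the new enumerateLSLocations() signature from the diff below could
// drive a second, immutable-aware enumeration. processFunction and runRLERound
// are invented names; the header paths are assumptions.
#include "swift/SIL/SILFunction.h"
#include "swift/SILOptimizer/Utils/LoadStoreOptUtils.h"
#include <vector>

using namespace swift;

// Illustrative placeholder for one round of redundant-load elimination.
void runRLERound(SILFunction &F, std::vector<LSLocation> &Locations,
                 LSLocationIndexMap &IndexMap, LSLocationBaseMap &BaseMap);

void processFunction(SILFunction &F, TypeExpansionAnalysis *TE) {
  std::vector<LSLocation> Locations;
  LSLocationIndexMap IndexMap;
  LSLocationBaseMap BaseMap;
  int numLoads = 0, numStores = 0;
  bool immutableLoadsFound = false;

  // Round 1: enumerate with the usual base addresses, but remember whether any
  // load sits on an immutable ref_element_addr/ref_tail_addr.
  LSLocation::enumerateLSLocations(F, Locations, IndexMap, BaseMap, TE,
                                   /*stopAtImmutable=*/ false,
                                   numLoads, numStores, immutableLoadsFound);
  runRLERound(F, Locations, IndexMap, BaseMap);

  if (!immutableLoadsFound)
    return;

  // Round 2: re-enumerate with stopAtImmutable = true so that loads of
  // immutable COW class properties/elements are rooted at the immutable
  // address, then run another round.
  Locations.clear();
  IndexMap.clear();
  BaseMap.clear();
  numLoads = numStores = 0;
  LSLocation::enumerateLSLocations(F, Locations, IndexMap, BaseMap, TE,
                                   /*stopAtImmutable=*/ true,
                                   numLoads, numStores, immutableLoadsFound);
  runRLERound(F, Locations, IndexMap, BaseMap);
}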

@@ -245,28 +245,60 @@ void LSLocation::reduce(LSLocation Base, SILModule *M,
   replaceSubLocations(Base, M, context, Locs, SubLocations);
 }

-void LSLocation::enumerateLSLocation(TypeExpansionContext context, SILModule *M,
+std::pair<SILValue, bool>
+LSLocation::getBaseAddressOrObject(SILValue v, bool stopAtImmutable) {
+  bool isImmutable = false;
+  while (true) {
+    if (auto *rea = dyn_cast<RefElementAddrInst>(v)) {
+      if (rea->isImmutable()) {
+        isImmutable = true;
+        if (stopAtImmutable)
+          return {v, true};
+      }
+    }
+    if (auto *rta = dyn_cast<RefTailAddrInst>(v)) {
+      if (rta->isImmutable()) {
+        isImmutable = true;
+        if (stopAtImmutable)
+          return {v, true};
+      }
+    }
+    SILValue v2 = stripCastsWithoutMarkDependence(v);
+    v2 = stripSinglePredecessorArgs(v2);
+    if (Projection::isAddressProjection(v2))
+      v2 = cast<SingleValueInstruction>(v2)->getOperand(0);
+    v2 = stripIndexingInsts(v2);
+    v2 = lookThroughOwnershipInsts(v2);
+    if (v2 == v)
+      return {v2, isImmutable};
+    v = v2;
+  }
+}
+
+bool LSLocation::enumerateLSLocation(TypeExpansionContext context, SILModule *M,
                                      SILValue Mem,
                                      std::vector<LSLocation> &Locations,
                                      LSLocationIndexMap &IndexMap,
                                      LSLocationBaseMap &BaseMap,
-                                     TypeExpansionAnalysis *TypeCache) {
+                                     TypeExpansionAnalysis *TypeCache,
+                                     bool stopAtImmutable) {
   // We have processed this SILValue before.
   if (BaseMap.find(Mem) != BaseMap.end())
-    return;
+    return false;

   // Construct a Location to represent the memory written by this instruction.
-  // ProjectionPath currently does not handle mark_dependence so stop our
-  // underlying object search at these instructions.
-  // We still get a benefit if we cse mark_dependence instructions and then
-  // merge loads from them.
-  SILValue UO = getUnderlyingObjectStopAtMarkDependence(Mem);
+  auto baseAndImmutable = getBaseAddressOrObject(Mem, stopAtImmutable);
+  SILValue UO = baseAndImmutable.first;
   LSLocation L(UO, ProjectionPath::getProjectionPath(UO, Mem));

   // If we can't figure out the Base or Projection Path for the memory location,
   // simply ignore it for now.
   if (!L.isValid())
-    return;
+    return false;

   // Record the SILValue to location mapping.
   BaseMap[Mem] = L;
@@ -281,6 +313,7 @@ void LSLocation::enumerateLSLocation(TypeExpansionContext context, SILModule *M,
     IndexMap[Loc] = Locations.size();
     Locations.push_back(Loc);
   }
+  return baseAndImmutable.second;
 }

 void
@@ -289,22 +322,26 @@ LSLocation::enumerateLSLocations(SILFunction &F,
                                  LSLocationIndexMap &IndexMap,
                                  LSLocationBaseMap &BaseMap,
                                  TypeExpansionAnalysis *TypeCache,
-                                 std::pair<int, int> &LSCount) {
+                                 bool stopAtImmutable,
+                                 int &numLoads, int &numStores,
+                                 bool &immutableLoadsFound) {
   // Enumerate all locations accessed by the loads or stores.
   for (auto &B : F) {
     for (auto &I : B) {
       if (auto *LI = dyn_cast<LoadInst>(&I)) {
-        enumerateLSLocation(F.getTypeExpansionContext(), &I.getModule(),
+        if (enumerateLSLocation(F.getTypeExpansionContext(), &I.getModule(),
                             LI->getOperand(), Locations, IndexMap, BaseMap,
-                            TypeCache);
-        ++LSCount.first;
+                            TypeCache, stopAtImmutable)) {
+          immutableLoadsFound = true;
+        }
+        ++numLoads;
         continue;
       }
       if (auto *SI = dyn_cast<StoreInst>(&I)) {
         enumerateLSLocation(F.getTypeExpansionContext(), &I.getModule(),
                             SI->getDest(), Locations, IndexMap, BaseMap,
-                            TypeCache);
-        ++LSCount.second;
+                            TypeCache, stopAtImmutable);
+        ++numStores;
         continue;
       }
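
For reference, a hedged usage sketch of the new getBaseAddressOrObject() helper in isolation: the call and its (base, isImmutable) result come from the diff above, while everything else (the helper name, the SIL in the comment, the assumption that it is exposed as a static member of LSLocation like the enumeration entry points) is illustrative.

// Hypothetical helper (not from the commit): classify a load address using the
// new getBaseAddressOrObject() API shown above, assuming it is a static member
// of LSLocation like the enumeration entry points.
static bool isRootedAtImmutableAddress(SILValue addr) {
  // For SIL like the following (invented type/field names):
  //   %f = ref_element_addr [immutable] %obj : $Storage, #Storage.count
  //   %v = load %f : $*Int
  // the walk stops at %f and returns {%f, true}; otherwise it returns the
  // usual underlying base/object and false.
  auto baseAndImmutable =
      LSLocation::getBaseAddressOrObject(addr, /*stopAtImmutable=*/ true);
  return baseAndImmutable.second;
}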