Manual merged main:bdbc2db5360e043c7e32cabeaa230cf91f5e51e2 into amd-gfx:ac9bf99e79f1

Local branch amd-gfx ac9bf99 Merged main:ef112833e11e94ea049f98bec4a29b4fe96a25dd into amd-gfx:d621799ca22b
Remote branch main bdbc2db [RemoveDIs] Enable conversion from dbg.declare to DPValue (llvm#74090)

Change-Id: Ic1c577a48c31052c9e44ce3733adc8b71a132127
mariusz-sikora-at-amd committed Dec 13, 2023
2 parents ac9bf99 + bdbc2db commit c2216b3
Showing 1,432 changed files with 218,652 additions and 19,824 deletions.
1 change: 1 addition & 0 deletions .github/workflows/libcxx-build-and-test.yaml
@@ -160,6 +160,7 @@ jobs:
'generic-no-tzdb',
'generic-no-unicode',
'generic-no-wide-characters',
'generic-no-rtti',
'generic-static',
'generic-with_llvm_unwinder',
# TODO Find a better place for the benchmark and bootstrapping builds to live. They're either very expensive
2 changes: 1 addition & 1 deletion .github/workflows/llvm-project-tests.yml
@@ -96,7 +96,7 @@ jobs:
# This should be a no-op for non-mac OSes
PKG_CONFIG_PATH: /usr/local/Homebrew/Library/Homebrew/os/mac/pkgconfig//12
with:
cmake_args: '-GNinja -DLLVM_ENABLE_PROJECTS="${{ inputs.projects }}" -DCMAKE_BUILD_TYPE=Release -DLLDB_INCLUDE_TESTS=OFF -DCMAKE_C_COMPILER_LAUNCHER=sccache -DCMAKE_CXX_COMPILER_LAUNCHER=sccache ${{ inputs.extra_cmake_args }}'
cmake_args: '-GNinja -DLLVM_ENABLE_PROJECTS="${{ inputs.projects }}" -DCMAKE_BUILD_TYPE=Release -DLLVM_ENABLE_ASSERTIONS=ON -DLLDB_INCLUDE_TESTS=OFF -DCMAKE_C_COMPILER_LAUNCHER=sccache -DCMAKE_CXX_COMPILER_LAUNCHER=sccache ${{ inputs.extra_cmake_args }}'
build_target: '${{ inputs.build_target }}'

- name: Build and Test libclc
13 changes: 10 additions & 3 deletions .github/workflows/new-prs.yml
@@ -20,12 +20,19 @@ jobs:
permissions:
pull-requests: write
# Only comment on PRs that have been opened for the first time, by someone
# new to LLVM or to GitHub as a whole.
# new to LLVM or to GitHub as a whole. Ideally we'd look for FIRST_TIMER
# or FIRST_TIME_CONTRIBUTOR, but this does not appear to work. Instead check
# that we do not have any of the other author associations.
# See https://docs.github.com/en/webhooks/webhook-events-and-payloads?actionType=opened#pull_request
# for all the possible values.
if: >-
(github.repository == 'llvm/llvm-project') &&
(github.event.action == 'opened') &&
(github.event.pull_request.author_association == 'FIRST_TIME_CONTRIBUTOR' ||
github.event.pull_request.author_association == 'FIRST_TIMER')
(github.event.pull_request.author_association != 'COLLABORATOR') &&
(github.event.pull_request.author_association != 'CONTRIBUTOR') &&
(github.event.pull_request.author_association != 'MANNEQUIN') &&
(github.event.pull_request.author_association != 'MEMBER') &&
(github.event.pull_request.author_association != 'OWNER')
steps:
- name: Setup Automation Script
run: |
6 changes: 5 additions & 1 deletion .github/workflows/pr-code-format.yml
@@ -67,10 +67,14 @@ jobs:
START_REV: ${{ github.event.pull_request.base.sha }}
END_REV: ${{ github.event.pull_request.head.sha }}
CHANGED_FILES: ${{ steps.changed-files.outputs.all_changed_files }}
# TODO(boomanaiden154): Once clang v18 is released, we should be able
# to take advantage of the new --diff_from_common_commit option
# explicitly in code-format-helper.py and not have to diff starting at
# the merge base.
run: |
python ./code-format-tools/llvm/utils/git/code-format-helper.py \
--token ${{ secrets.GITHUB_TOKEN }} \
--issue-number $GITHUB_PR_NUMBER \
--start-rev $START_REV \
--start-rev $(git merge-base $START_REV $END_REV) \
--end-rev $END_REV \
--changed-files "$CHANGED_FILES"
15 changes: 13 additions & 2 deletions bolt/include/bolt/Core/BinaryFunction.h
@@ -75,6 +75,14 @@ enum IndirectCallPromotionType : char {
ICP_ALL /// Perform ICP on calls and jump tables.
};

/// Hash functions supported for BF/BB hashing.
enum class HashFunction : char {
StdHash, /// std::hash, implementation is platform-dependent. Provided for
/// backwards compatibility.
XXH3, /// llvm::xxh3_64bits, the default.
Default = XXH3,
};

/// Information on a single indirect call to a particular callee.
struct IndirectCallProfile {
MCSymbol *Symbol;
@@ -2234,18 +2242,21 @@ class BinaryFunction {
///
/// If \p UseDFS is set, process basic blocks in DFS order. Otherwise, use
/// the existing layout order.
/// \p HashFunction specifies which function is used for BF hashing.
///
/// By default, instruction operands are ignored while calculating the hash.
/// The caller can change this via passing \p OperandHashFunc function.
/// The return result of this function will be mixed with internal hash.
size_t computeHash(
bool UseDFS = false,
bool UseDFS = false, HashFunction HashFunction = HashFunction::Default,
OperandHashFuncTy OperandHashFunc = [](const MCOperand &) {
return std::string();
}) const;

/// Compute hash values for each block of the function.
void computeBlockHashes() const;
/// \p HashFunction specifies which function is used for BB hashing.
void
computeBlockHashes(HashFunction HashFunction = HashFunction::Default) const;

void setDWARFUnit(DWARFUnit *Unit) { DwarfUnit = Unit; }

2 changes: 0 additions & 2 deletions bolt/include/bolt/Core/DebugData.h
@@ -459,8 +459,6 @@ class DebugStrOffsetsWriter {
std::unique_ptr<raw_svector_ostream> StrOffsetsStream;
std::map<uint32_t, uint32_t> IndexToAddressMap;
std::unordered_map<uint64_t, uint64_t> ProcessedBaseOffsets;
// Section size not including header.
uint32_t CurrentSectionSize{0};
bool StrOffsetSectionWasModified = false;
};

11 changes: 11 additions & 0 deletions bolt/include/bolt/Profile/ProfileYAMLMapping.h
@@ -178,6 +178,14 @@ template <> struct ScalarBitSetTraits<PROFILE_PF> {
}
};

template <> struct ScalarEnumerationTraits<llvm::bolt::HashFunction> {
using HashFunction = llvm::bolt::HashFunction;
static void enumeration(IO &io, HashFunction &value) {
io.enumCase(value, "std-hash", HashFunction::StdHash);
io.enumCase(value, "xxh3", HashFunction::XXH3);
}
};

namespace bolt {
struct BinaryProfileHeader {
uint32_t Version{1};
@@ -188,6 +196,7 @@ struct BinaryProfileHeader {
std::string Origin; // How the profile was obtained.
std::string EventNames; // Events used for sample profile.
bool IsDFSOrder{true}; // Whether using DFS block order in function profile
llvm::bolt::HashFunction HashFunction; // Hash used for BB/BF hashing
};
} // end namespace bolt

@@ -200,6 +209,8 @@ template <> struct MappingTraits<bolt::BinaryProfileHeader> {
YamlIO.mapOptional("profile-origin", Header.Origin);
YamlIO.mapOptional("profile-events", Header.EventNames);
YamlIO.mapOptional("dfs-order", Header.IsDFSOrder);
YamlIO.mapOptional("hash-func", Header.HashFunction,
llvm::bolt::HashFunction::StdHash);
}
};

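For reference, the new mapping surfaces in a BOLT YAML profile header roughly as in the sketch below. This is an illustrative fragment, not output copied from the tool: only the key names and the "std-hash"/"xxh3" enum values come from the traits above, the example values are placeholders, and when hash-func is omitted the reader falls back to std-hash (the mapOptional default) so older profiles keep parsing. Profiles produced by YAMLProfileWriter record HashFunction::Default, i.e. xxh3.

  # Illustrative header fragment; values are placeholders.
  profile-origin: branch profile reader
  profile-events: cycles
  dfs-order: false
  hash-func: xxh3   # "std-hash" or "xxh3"; defaults to std-hash when absent
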
10 changes: 8 additions & 2 deletions bolt/lib/Core/BinaryFunction.cpp
@@ -3633,7 +3633,7 @@ BinaryFunction::BasicBlockListType BinaryFunction::dfs() const {
return DFS;
}

size_t BinaryFunction::computeHash(bool UseDFS,
size_t BinaryFunction::computeHash(bool UseDFS, HashFunction HashFunction,
OperandHashFuncTy OperandHashFunc) const {
if (size() == 0)
return 0;
@@ -3652,7 +3652,13 @@ size_t BinaryFunction::computeHash(bool UseDFS,
for (const BinaryBasicBlock *BB : Order)
HashString.append(hashBlock(BC, *BB, OperandHashFunc));

return Hash = llvm::xxh3_64bits(HashString);
switch (HashFunction) {
case HashFunction::StdHash:
return Hash = std::hash<std::string>{}(HashString);
case HashFunction::XXH3:
return Hash = llvm::xxh3_64bits(HashString);
}
llvm_unreachable("Unhandled HashFunction");
}

void BinaryFunction::insertBasicBlocks(
1 change: 1 addition & 0 deletions bolt/lib/Core/BinaryFunctionProfile.cpp
@@ -225,6 +225,7 @@ void BinaryFunction::mergeProfileDataInto(BinaryFunction &BF) const {
for (const BinaryBasicBlock *BBSucc : BB->successors()) {
(void)BBSucc;
assert(getIndex(BBSucc) == BF.getIndex(*BBMergeSI));
(void)BBMergeSI;

// At this point no branch count should be set to COUNT_NO_PROFILE.
assert(BII->Count != BinaryBasicBlock::COUNT_NO_PROFILE &&
6 changes: 4 additions & 2 deletions bolt/lib/Core/DebugData.cpp
@@ -889,8 +889,10 @@ void DebugStrOffsetsWriter::finalizeSection(DWARFUnit &Unit,
// Handling re-use of str-offsets section.
if (RetVal == ProcessedBaseOffsets.end() || StrOffsetSectionWasModified) {
// Writing out the header for each section.
support::endian::write(*StrOffsetsStream, CurrentSectionSize + 4,
llvm::endianness::little);
support::endian::write(
*StrOffsetsStream,
static_cast<uint32_t>(IndexToAddressMap.size() * 4 + 4),
llvm::endianness::little);
support::endian::write(*StrOffsetsStream, static_cast<uint16_t>(5),
llvm::endianness::little);
support::endian::write(*StrOffsetsStream, static_cast<uint16_t>(0),
6 changes: 3 additions & 3 deletions bolt/lib/Passes/IdenticalCodeFolding.cpp
@@ -360,9 +360,9 @@ void IdenticalCodeFolding::runOnFunctions(BinaryContext &BC) {

// Pre-compute hash before pushing into hashtable.
// Hash instruction operands to minimize hash collisions.
BF.computeHash(opts::ICFUseDFS, [&BC](const MCOperand &Op) {
return hashInstOperand(BC, Op);
});
BF.computeHash(
opts::ICFUseDFS, HashFunction::Default,
[&BC](const MCOperand &Op) { return hashInstOperand(BC, Op); });
};

ParallelUtilities::PredicateTy SkipFunc = [&](const BinaryFunction &BF) {
1 change: 1 addition & 0 deletions bolt/lib/Passes/VeneerElimination.cpp
@@ -89,6 +89,7 @@ void VeneerElimination::runOnFunctions(BinaryContext &BC) {
LLVM_DEBUG(
dbgs() << "BOLT-INFO: number of linker-inserted veneers call sites: "
<< VeneerCallers << "\n");
(void)VeneerCallers;
}

} // namespace bolt
40 changes: 32 additions & 8 deletions bolt/lib/Profile/StaleProfileMatching.cpp
@@ -225,7 +225,7 @@ class StaleMatcher {
std::unordered_map<uint16_t, std::vector<HashBlockPairType>> OpHashToBlocks;
};

void BinaryFunction::computeBlockHashes() const {
void BinaryFunction::computeBlockHashes(HashFunction HashFunction) const {
if (size() == 0)
return;

@@ -241,12 +241,26 @@
// Hashing complete instructions.
std::string InstrHashStr = hashBlock(
BC, *BB, [&](const MCOperand &Op) { return hashInstOperand(BC, Op); });
uint64_t InstrHash = llvm::xxh3_64bits(InstrHashStr);
BlendedHashes[I].InstrHash = (uint16_t)InstrHash;
if (HashFunction == HashFunction::StdHash) {
uint64_t InstrHash = std::hash<std::string>{}(InstrHashStr);
BlendedHashes[I].InstrHash = (uint16_t)hash_value(InstrHash);
} else if (HashFunction == HashFunction::XXH3) {
uint64_t InstrHash = llvm::xxh3_64bits(InstrHashStr);
BlendedHashes[I].InstrHash = (uint16_t)InstrHash;
} else {
llvm_unreachable("Unhandled HashFunction");
}
// Hashing opcodes.
std::string OpcodeHashStr = hashBlockLoose(BC, *BB);
OpcodeHashes[I] = llvm::xxh3_64bits(OpcodeHashStr);
BlendedHashes[I].OpcodeHash = (uint16_t)OpcodeHashes[I];
if (HashFunction == HashFunction::StdHash) {
OpcodeHashes[I] = std::hash<std::string>{}(OpcodeHashStr);
BlendedHashes[I].OpcodeHash = (uint16_t)hash_value(OpcodeHashes[I]);
} else if (HashFunction == HashFunction::XXH3) {
OpcodeHashes[I] = llvm::xxh3_64bits(OpcodeHashStr);
BlendedHashes[I].OpcodeHash = (uint16_t)OpcodeHashes[I];
} else {
llvm_unreachable("Unhandled HashFunction");
}
}

// Initialize neighbor hash.
@@ -258,15 +272,25 @@
uint64_t SuccHash = OpcodeHashes[SuccBB->getIndex()];
Hash = hashing::detail::hash_16_bytes(Hash, SuccHash);
}
BlendedHashes[I].SuccHash = (uint8_t)Hash;
if (HashFunction == HashFunction::StdHash) {
// Compatibility with old behavior.
BlendedHashes[I].SuccHash = (uint8_t)hash_value(Hash);
} else {
BlendedHashes[I].SuccHash = (uint8_t)Hash;
}

// Append hashes of predecessors.
Hash = 0;
for (BinaryBasicBlock *PredBB : BB->predecessors()) {
uint64_t PredHash = OpcodeHashes[PredBB->getIndex()];
Hash = hashing::detail::hash_16_bytes(Hash, PredHash);
}
BlendedHashes[I].PredHash = (uint8_t)Hash;
if (HashFunction == HashFunction::StdHash) {
// Compatibility with old behavior.
BlendedHashes[I].PredHash = (uint8_t)hash_value(Hash);
} else {
BlendedHashes[I].PredHash = (uint8_t)Hash;
}
}

// Assign hashes.
@@ -682,7 +706,7 @@ bool YAMLProfileReader::inferStaleProfile(
<< "\"" << BF.getPrintName() << "\"\n");

// Make sure that block hashes are up to date.
BF.computeBlockHashes();
BF.computeBlockHashes(YamlBP.Header.HashFunction);

const BinaryFunction::BasicBlockOrderType BlockOrder(
BF.getLayout().block_begin(), BF.getLayout().block_end());
18 changes: 16 additions & 2 deletions bolt/lib/Profile/YAMLProfileReader.cpp
@@ -83,6 +83,7 @@ bool YAMLProfileReader::parseFunctionProfile(
BinaryContext &BC = BF.getBinaryContext();

const bool IsDFSOrder = YamlBP.Header.IsDFSOrder;
const HashFunction HashFunction = YamlBP.Header.HashFunction;
bool ProfileMatched = true;
uint64_t MismatchedBlocks = 0;
uint64_t MismatchedCalls = 0;
@@ -98,7 +99,8 @@
FuncRawBranchCount += YamlSI.Count;
BF.setRawBranchCount(FuncRawBranchCount);

if (!opts::IgnoreHash && YamlBF.Hash != BF.computeHash(IsDFSOrder)) {
if (!opts::IgnoreHash &&
YamlBF.Hash != BF.computeHash(IsDFSOrder, HashFunction)) {
if (opts::Verbosity >= 1)
errs() << "BOLT-WARNING: function hash mismatch\n";
ProfileMatched = false;
@@ -326,6 +328,17 @@ bool YAMLProfileReader::mayHaveProfileData(const BinaryFunction &BF) {
}

Error YAMLProfileReader::readProfile(BinaryContext &BC) {
if (opts::Verbosity >= 1) {
outs() << "BOLT-INFO: YAML profile with hash: ";
switch (YamlBP.Header.HashFunction) {
case HashFunction::StdHash:
outs() << "std::hash\n";
break;
case HashFunction::XXH3:
outs() << "xxh3\n";
break;
}
}
YamlProfileToFunction.resize(YamlBP.Functions.size() + 1);

auto profileMatches = [](const yaml::bolt::BinaryFunctionProfile &Profile,
@@ -348,7 +361,8 @@

// Recompute hash once per function.
if (!opts::IgnoreHash)
Function.computeHash(YamlBP.Header.IsDFSOrder);
Function.computeHash(YamlBP.Header.IsDFSOrder,
YamlBP.Header.HashFunction);

if (profileMatches(YamlBF, Function))
matchProfileToFunction(YamlBF, Function);
1 change: 1 addition & 0 deletions bolt/lib/Profile/YAMLProfileWriter.cpp
@@ -189,6 +189,7 @@ std::error_code YAMLProfileWriter::writeProfile(const RewriteInstance &RI) {
BP.Header.Id = BuildID ? std::string(*BuildID) : "<unknown>";
BP.Header.Origin = std::string(RI.getProfileReader()->getReaderName());
BP.Header.IsDFSOrder = opts::ProfileUseDFS;
BP.Header.HashFunction = HashFunction::Default;

StringSet<> EventNames = RI.getProfileReader()->getEventNames();
if (!EventNames.empty()) {
5 changes: 3 additions & 2 deletions bolt/lib/Rewrite/DWARFRewriter.cpp
@@ -696,8 +696,9 @@ void DWARFRewriter::updateDebugInfo() {
std::optional<DWARFUnit *> SplitCU;
std::optional<uint64_t> RangesBase;
std::optional<uint64_t> DWOId = Unit->getDWOId();
StrOffstsWriter->initialize(Unit->getStringOffsetSection(),
Unit->getStringOffsetsTableContribution());
if (Unit->getVersion() >= 5)
StrOffstsWriter->initialize(Unit->getStringOffsetSection(),
Unit->getStringOffsetsTableContribution());
if (DWOId)
SplitCU = BC.getDWOCU(*DWOId);
DebugLocWriter *DebugLocWriter = createRangeLocList(*Unit);
(Diffs for the remaining changed files are not shown here.)
