-
Notifications
You must be signed in to change notification settings - Fork 15k
[Offload][Conformance] Update olMemFree calls in conformance tests #157773
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Merged
jhuber6
merged 1 commit into
llvm:users/RossBrunton/newMemInfo1
from
leandrolcampos:OffloadMemInfo
Sep 10, 2025
Merged
[Offload][Conformance] Update olMemFree calls in conformance tests #157773
jhuber6
merged 1 commit into
llvm:users/RossBrunton/newMemInfo1
from
leandrolcampos:OffloadMemInfo
Sep 10, 2025
+14
−34
Conversation
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
@llvm/pr-subscribers-offload Author: Leandro Lacerda (leandrolcampos) — Changes: This PR is a follow-up to the change introduced in #157478, which added a platform parameter to the olMemFree function. Full diff: https://github.com/llvm/llvm-project/pull/157773.diff 4 Files Affected:
diff --git a/offload/unittests/Conformance/include/mathtest/DeviceContext.hpp b/offload/unittests/Conformance/include/mathtest/DeviceContext.hpp
index 7a11798856550..95e90139593f9 100644
--- a/offload/unittests/Conformance/include/mathtest/DeviceContext.hpp
+++ b/offload/unittests/Conformance/include/mathtest/DeviceContext.hpp
@@ -57,13 +57,13 @@ class DeviceContext {
explicit DeviceContext(llvm::StringRef Platform, std::size_t DeviceId = 0);
template <typename T>
- ManagedBuffer<T> createManagedBuffer(std::size_t Size) noexcept {
+ ManagedBuffer<T> createManagedBuffer(std::size_t Size) const noexcept {
void *UntypedAddress = nullptr;
detail::allocManagedMemory(DeviceHandle, Size * sizeof(T), &UntypedAddress);
T *TypedAddress = static_cast<T *>(UntypedAddress);
- return ManagedBuffer<T>(getPlatformHandle(), TypedAddress, Size);
+ return ManagedBuffer<T>(PlatformHandle, TypedAddress, Size);
}
[[nodiscard]] llvm::Expected<std::shared_ptr<DeviceImage>>
@@ -120,9 +120,6 @@ class DeviceContext {
[[nodiscard]] llvm::StringRef getPlatform() const noexcept;
- [[nodiscard]] llvm::Expected<ol_platform_handle_t>
- getPlatformHandle() noexcept;
-
private:
[[nodiscard]] llvm::Expected<ol_symbol_handle_t>
getKernelHandle(ol_program_handle_t ProgramHandle,
@@ -134,7 +131,7 @@ class DeviceContext {
std::size_t GlobalDeviceId;
ol_device_handle_t DeviceHandle;
- ol_platform_handle_t PlatformHandle = nullptr;
+ ol_platform_handle_t PlatformHandle;
};
} // namespace mathtest
diff --git a/offload/unittests/Conformance/include/mathtest/DeviceResources.hpp b/offload/unittests/Conformance/include/mathtest/DeviceResources.hpp
index 6084732baf6ee..d6d9be6525f5e 100644
--- a/offload/unittests/Conformance/include/mathtest/DeviceResources.hpp
+++ b/offload/unittests/Conformance/include/mathtest/DeviceResources.hpp
@@ -47,7 +47,8 @@ template <typename T> class [[nodiscard]] ManagedBuffer {
ManagedBuffer &operator=(const ManagedBuffer &) = delete;
ManagedBuffer(ManagedBuffer &&Other) noexcept
- : Address(Other.Address), Size(Other.Size) {
+ : Platform(Other.Platform), Address(Other.Address), Size(Other.Size) {
+ Other.Platform = nullptr;
Other.Address = nullptr;
Other.Size = 0;
}
@@ -59,9 +60,11 @@ template <typename T> class [[nodiscard]] ManagedBuffer {
if (Address)
detail::freeDeviceMemory(Platform, Address);
+ Platform = Other.Platform;
Address = Other.Address;
Size = Other.Size;
+ Other.Platform = nullptr;
Other.Address = nullptr;
Other.Size = 0;
@@ -89,7 +92,7 @@ template <typename T> class [[nodiscard]] ManagedBuffer {
std::size_t Size) noexcept
: Platform(Platform), Address(Address), Size(Size) {}
- ol_platform_handle_t Platform;
+ ol_platform_handle_t Platform = nullptr;
T *Address = nullptr;
std::size_t Size = 0;
};
diff --git a/offload/unittests/Conformance/include/mathtest/GpuMathTest.hpp b/offload/unittests/Conformance/include/mathtest/GpuMathTest.hpp
index fdf30d58ae1e7..b88d6e9aebdc8 100644
--- a/offload/unittests/Conformance/include/mathtest/GpuMathTest.hpp
+++ b/offload/unittests/Conformance/include/mathtest/GpuMathTest.hpp
@@ -75,7 +75,7 @@ class [[nodiscard]] GpuMathTest final {
ResultType run(GeneratorType &Generator,
std::size_t BufferSize = DefaultBufferSize,
- uint32_t GroupSize = DefaultGroupSize) noexcept {
+ uint32_t GroupSize = DefaultGroupSize) const noexcept {
assert(BufferSize > 0 && "Buffer size must be a positive value");
assert(GroupSize > 0 && "Group size must be a positive value");
@@ -128,7 +128,7 @@ class [[nodiscard]] GpuMathTest final {
return *ExpectedKernel;
}
- [[nodiscard]] auto createBuffers(std::size_t BufferSize) {
+ [[nodiscard]] auto createBuffers(std::size_t BufferSize) const {
auto InBuffersTuple = std::apply(
[&](auto... InTypeIdentities) {
return std::make_tuple(
diff --git a/offload/unittests/Conformance/lib/DeviceContext.cpp b/offload/unittests/Conformance/lib/DeviceContext.cpp
index d72f56ca1f175..987d7841fa763 100644
--- a/offload/unittests/Conformance/lib/DeviceContext.cpp
+++ b/offload/unittests/Conformance/lib/DeviceContext.cpp
@@ -103,6 +103,7 @@ getPlatformBackend(ol_platform_handle_t PlatformHandle) noexcept {
struct Device {
ol_device_handle_t Handle;
+ ol_platform_handle_t PlatformHandle;
std::string Name;
std::string Platform;
ol_platform_backend_t Backend;
@@ -124,7 +125,7 @@ const std::vector<Device> &getDevices() {
auto Platform = getPlatformName(PlatformHandle);
static_cast<std::vector<Device> *>(Data)->push_back(
- {DeviceHandle, Name, Platform, Backend});
+ {DeviceHandle, PlatformHandle, Name, Platform, Backend});
}
return true;
@@ -175,6 +176,7 @@ DeviceContext::DeviceContext(std::size_t GlobalDeviceId)
llvm::Twine(Devices.size()));
DeviceHandle = Devices[GlobalDeviceId].Handle;
+ PlatformHandle = Devices[GlobalDeviceId].PlatformHandle;
}
DeviceContext::DeviceContext(llvm::StringRef Platform, std::size_t DeviceId)
@@ -210,6 +212,7 @@ DeviceContext::DeviceContext(llvm::StringRef Platform, std::size_t DeviceId)
GlobalDeviceId = *FoundGlobalDeviceId;
DeviceHandle = Devices[GlobalDeviceId].Handle;
+ PlatformHandle = Devices[GlobalDeviceId].PlatformHandle;
}
[[nodiscard]] llvm::Expected<std::shared_ptr<DeviceImage>>
@@ -286,29 +289,6 @@ DeviceContext::getKernelHandle(ol_program_handle_t ProgramHandle,
return Handle;
}
-llvm::Expected<ol_platform_handle_t>
-DeviceContext::getPlatformHandle() noexcept {
- if (!PlatformHandle) {
- const ol_result_t OlResult =
- olGetDeviceInfo(DeviceHandle, OL_DEVICE_INFO_PLATFORM,
- sizeof(PlatformHandle), &PlatformHandle);
-
- if (OlResult != OL_SUCCESS) {
- PlatformHandle = nullptr;
- llvm::StringRef Details =
- OlResult->Details ? OlResult->Details : "No details provided";
-
- // clang-format off
- return llvm::createStringError(
- llvm::Twine(Details) +
- " (code " + llvm::Twine(OlResult->Code) + ")");
- // clang-format on
- }
- }
-
- return PlatformHandle;
-}
-
void DeviceContext::launchKernelImpl(
ol_symbol_handle_t KernelHandle, uint32_t NumGroups, uint32_t GroupSize,
const void *KernelArgs, std::size_t KernelArgsSize) const noexcept {
|
jhuber6
approved these changes
Sep 10, 2025
@jhuber6 This was merged into my user branch, was that intentional? |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment
Add this suggestion to a batch that can be applied as a single commit.
This suggestion is invalid because no changes were made to the code.
Suggestions cannot be applied while the pull request is closed.
Suggestions cannot be applied while viewing a subset of changes.
Only one suggestion per line can be applied in a batch.
Add this suggestion to a batch that can be applied as a single commit.
Applying suggestions on deleted lines is not supported.
You must change the existing code in this line in order to create a valid suggestion.
Outdated suggestions cannot be applied.
This suggestion has been applied or marked resolved.
Suggestions cannot be applied from pending reviews.
Suggestions cannot be applied on multi-line comments.
Suggestions cannot be applied while the pull request is queued to merge.
Suggestion cannot be applied right now. Please check back later.
This PR is a follow-up to the change introduced in #157478, which added a platform parameter to the olMemFree function.