support running test_mobile_profiler with buck1/buck2 and OSS (#89001)

Summary:
Internally we are switching to a new version of buck, but we must also keep
this test working in OSS builds.

Test Plan: Rely on CI.

Differential Revision: D41270673

Pull Request resolved: https://github.com/pytorch/pytorch/pull/89001
Approved by: https://github.com/r-barnes, https://github.com/osalpekar, https://github.com/malfet
Author:       mikey dagitses
Date:         2022-11-14 22:11:29 +00:00
Committed by: PyTorch MergeBot
Parent:       074278f393
Commit:       3b33a2794e
3 changed files with 34 additions and 20 deletions
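
In short, each test stops deriving the model path from __FILE__ (which buck2 does not resolve to a source-tree location, per the TODO removed below) and instead asks a new helper for a repo-root-relative path. The pattern repeated in every affected test is roughly:

    // Before: model path derived from the location of the test source file.
    std::string filePath(__FILE__);
    auto testModelFile = filePath.substr(0, filePath.find_last_of("/\\") + 1);
    testModelFile.append("to_be_profiled_module.ptl");
    mobile::Module bc = _load_for_mobile(testModelFile);

    // After: model path expressed relative to the repo root via the new helper.
    auto testModelFile = torch::testing::getResourcePath(
        "test/cpp/lite_interpreter_runtime/to_be_profiled_module.ptl");
    mobile::Module bc = _load_for_mobile(testModelFile.string());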

test/cpp/lite_interpreter_runtime/CMakeLists.txt

@@ -25,6 +25,7 @@ target_link_libraries(test_lite_interpreter_runtime PRIVATE torch gtest backend_
if(LINUX)
target_link_libraries(test_lite_interpreter_runtime PRIVATE "-Wl,--no-as-needed,$<TARGET_FILE:backend_with_compiler_runtime>,--as-needed")
+target_link_libraries(test_lite_interpreter_runtime PRIVATE stdc++fs)
endif()
if(INSTALL_TEST)

test/cpp/lite_interpreter_runtime/resources.h (new file)

@@ -0,0 +1,19 @@
#pragma once
#include <experimental/filesystem>
#include <string>
namespace torch {
namespace testing {
/// Gets the path to the resource identified by name.
///
/// @param name identifies a resource, relative path starting from the
/// repo root
inline auto getResourcePath(std::string name)
-> std::experimental::filesystem::path {
return std::move(name);
}
} // namespace testing
} // namespace torch
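
Note that getResourcePath does not touch the filesystem: it just wraps the repo-root-relative name in an std::experimental::filesystem::path, so the result stays relative and is resolved against the test process's working directory when the model file is opened. A minimal standalone sketch (the main() harness here is illustrative, not part of the commit):

    #include <experimental/filesystem>
    #include <iostream>

    #include "test/cpp/lite_interpreter_runtime/resources.h"

    int main() {
      namespace fs = std::experimental::filesystem;
      auto p = torch::testing::getResourcePath(
          "test/cpp/lite_interpreter_runtime/to_be_profiled_module.ptl");
      std::cout << p.is_absolute() << "\n";  // prints 0: the path is still relative
      std::cout << fs::absolute(p) << "\n";  // resolved against the current working directory
    }

Building against libstdc++'s <experimental/filesystem> requires linking stdc++fs, which is what the CMake change above adds for the test binary on Linux.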

test/cpp/lite_interpreter_runtime/test_mobile_profiler.cpp

@@ -11,6 +11,8 @@
#include <torch/csrc/profiler/events.h>
#include "test/cpp/lite_interpreter_runtime/resources.h"
#ifdef EDGE_PROFILER_USE_KINETO
namespace torch {
namespace jit {
@@ -42,16 +44,15 @@ bool checkMetaData(
} // namespace
TEST(MobileProfiler, ModuleHierarchy) {
-std::string filePath(__FILE__);
-auto testModelFile = filePath.substr(0, filePath.find_last_of("/\\") + 1);
-testModelFile.append("to_be_profiled_module.ptl");
+auto testModelFile = torch::testing::getResourcePath(
+    "test/cpp/lite_interpreter_runtime/to_be_profiled_module.ptl");
std::vector<IValue> inputs;
inputs.emplace_back(at::rand({64, 64}));
inputs.emplace_back(at::rand({64, 64}));
std::string trace_file_name("/tmp/test_trace.trace");
-mobile::Module bc = _load_for_mobile(testModelFile);
+mobile::Module bc = _load_for_mobile(testModelFile.string());
{
KinetoEdgeCPUProfiler profiler(
bc,
@@ -95,16 +96,15 @@ TEST(MobileProfiler, ModuleHierarchy) {
}
TEST(MobileProfiler, Backend) {
-std::string filePath(__FILE__);
-auto testModelFile = filePath.substr(0, filePath.find_last_of("/\\") + 1);
-testModelFile.append("test_backend_for_profiling.ptl");
+auto testModelFile = torch::testing::getResourcePath(
+    "test/cpp/lite_interpreter_runtime/test_backend_for_profiling.ptl");
std::vector<IValue> inputs;
inputs.emplace_back(at::rand({64, 64}));
inputs.emplace_back(at::rand({64, 64}));
std::string trace_file_name("/tmp/test_trace_backend.trace");
-mobile::Module bc = _load_for_mobile(testModelFile);
+mobile::Module bc = _load_for_mobile(testModelFile.string());
{
KinetoEdgeCPUProfiler profiler(
bc,
@@ -130,16 +130,15 @@ TEST(MobileProfiler, Backend) {
}
TEST(MobileProfiler, BackendMemoryEvents) {
-std::string filePath(__FILE__);
-auto testModelFile = filePath.substr(0, filePath.find_last_of("/\\") + 1);
-testModelFile.append("test_backend_for_profiling.ptl");
+auto testModelFile = torch::testing::getResourcePath(
+    "test/cpp/lite_interpreter_runtime/test_backend_for_profiling.ptl");
std::vector<IValue> inputs;
inputs.emplace_back(at::rand({64, 64}));
inputs.emplace_back(at::rand({64, 64}));
std::string trace_file_name("/tmp/test_trace_backend_memory.trace");
-mobile::Module bc = _load_for_mobile(testModelFile);
+mobile::Module bc = _load_for_mobile(testModelFile.string());
{
mobile::KinetoEdgeCPUProfiler profiler(
bc,
@@ -163,13 +162,8 @@ TEST(MobileProfiler, BackendMemoryEvents) {
}
TEST(MobileProfiler, ProfilerEvent) {
-/*
- * TODO: Using __FILE__ is unreliable e.g. it fails to resolve correctly when
- * using buck2, works ok with buck1
- */
-std::string filePath(__FILE__);
-auto testModelFile = filePath.substr(0, filePath.find_last_of("/\\") + 1);
-testModelFile.append("test_backend_for_profiling.ptl");
+auto testModelFile = torch::testing::getResourcePath(
+    "test/cpp/lite_interpreter_runtime/test_backend_for_profiling.ptl");
std::vector<IValue> inputs;
inputs.emplace_back(at::rand({64, 64}));
@@ -180,7 +174,7 @@ TEST(MobileProfiler, ProfilerEvent) {
torch::profiler::ProfilerPerfEvents.begin(),
torch::profiler::ProfilerPerfEvents.end());
-mobile::Module bc = _load_for_mobile(testModelFile);
+mobile::Module bc = _load_for_mobile(testModelFile.string());
{
// Bail if something goes wrong here
try {