Mirror of https://github.com/pytorch/pytorch.git (synced 2025-10-20 12:54:11 +08:00)
Fix clang-tidy warnings in Caffe2 code (#134935)
Fixes #ISSUE_NUMBER

Pull Request resolved: https://github.com/pytorch/pytorch/pull/134935
Approved by: https://github.com/ezyang
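The changes below are mostly mechanical applications of common clang-tidy fixes across the caffe2 tree: C++17 nested-namespace declarations, initialized locals and default member initializers, defaulted special member functions, [[maybe_unused]] parameters, and std::function parameters taken by const reference. A minimal before/after sketch of these patterns (illustrative code with made-up names, not lines from the patch):

    #include <cstddef>
    #include <functional>

    namespace demo::serialize { // C++17 nested-namespace declaration (modernize-concat-nested-namespaces)

    struct Reader {
      size_t size_{};              // default member initializer instead of an indeterminate value
      virtual ~Reader() = default; // defaulted destructor instead of an empty out-of-line body
    };

    // std::function taken by const reference (performance-unnecessary-value-param)
    inline size_t consume(
        const std::function<size_t(const void*, size_t)>& fn,
        const void* buf,
        size_t n,
        const char* what [[maybe_unused]] = "") {
      size_t written = 0; // initialize locals at declaration (cppcoreguidelines-init-variables)
      written = fn(buf, n);
      return written;
    }

    } // namespace demo::serialize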
@@ -210,6 +210,8 @@ include_patterns = [
     'aten/src/ATen/native/nested/*.h',
     'c10/**/*.cpp',
     'c10/**/*.h',
+    'caffe2/**/*.cc',
+    'caffe2/**/*.h',
     'torch/*.h',
     'torch/csrc/*.h',
     'torch/csrc/*.cpp',
BUCK.oss
@@ -65,7 +65,6 @@ cxx_library(
         "caffe2/serialize/file_adapter.cc",
         "caffe2/serialize/inline_container.cc",
         "caffe2/serialize/istream_adapter.cc",
-        "caffe2/serialize/read_adapter_interface.cc",
     ],
     visibility = ["PUBLIC"],
     deps = [
@@ -473,7 +473,6 @@ filegroup(
         "caffe2/serialize/file_adapter.cc",
         "caffe2/serialize/inline_container.cc",
         "caffe2/serialize/istream_adapter.cc",
-        "caffe2/serialize/read_adapter_interface.cc",
     ],
 )
@@ -34,7 +34,6 @@ def define_targets(rules):
             "caffe2/serialize/file_adapter.cc",
             "caffe2/serialize/inline_container.cc",
             "caffe2/serialize/istream_adapter.cc",
-            "caffe2/serialize/read_adapter_interface.cc",
         ],
         copts = ["-fexceptions"],
         tags = [
@@ -3,7 +3,7 @@
 #include <chrono>
 
-#include "caffe2/core/common.h"
+#include <c10/macros/Macros.h>
 
 namespace caffe2 {
@@ -2,7 +2,7 @@
 // example, if your compiler did not specify -mavx, you should not provide
 // the CAFFE2_PERF_WITH_AVX macro.
 
-#include "caffe2/core/common.h"
+#include "caffe2/core/macros.h"
 
 #ifdef CAFFE2_PERF_WITH_AVX
 #ifndef __AVX__
@@ -2,7 +2,7 @@
 // example, if your compiler did not specify -mavx2, you should not provide
 // the CAFFE2_PERF_WITH_AVX2 macro.
 
-#include "caffe2/core/common.h"
+#include "caffe2/core/macros.h"
 
 #ifdef CAFFE2_PERF_WITH_AVX2
 #ifndef __AVX2__
@@ -6,8 +6,7 @@ list(APPEND Caffe2_CPU_SRCS
   ${CMAKE_CURRENT_SOURCE_DIR}/inline_container.cc
   ${CMAKE_CURRENT_SOURCE_DIR}/istream_adapter.cc
   ${CMAKE_CURRENT_SOURCE_DIR}/file_adapter.cc
-  ${CMAKE_CURRENT_SOURCE_DIR}/crc.cc
-  ${CMAKE_CURRENT_SOURCE_DIR}/read_adapter_interface.cc)
+  ${CMAKE_CURRENT_SOURCE_DIR}/crc.cc)
 list(APPEND Caffe2_CPU_INCLUDE ${PROJECT_SOURCE_DIR}/third_party/miniz-2.1.0)
 
 set(Caffe2_CPU_TEST_SRCS ${Caffe2_CPU_TEST_SRCS} PARENT_SCOPE)
@@ -25,7 +25,7 @@
 // using the aforementioned #defines the table is automatically fitted to your needs
 
 // uint8_t, uint32_t, int32_t
-#include <stdint.h>
+#include <cstdint>
 // size_t
 #include <cstddef>
 
@@ -3,13 +3,11 @@
 #include <cerrno>
 #include <cstdio>
 #include <string>
-#include "caffe2/core/common.h"
 
-namespace caffe2 {
-namespace serialize {
+namespace caffe2::serialize {
 
-FileAdapter::RAIIFile::RAIIFile(const std::string& file_name) {
-  fp_ = fopen(file_name.c_str(), "rb");
+FileAdapter::RAIIFile::RAIIFile(const std::string& file_name)
+    : fp_(fopen(file_name.c_str(), "rb")) {
   if (fp_ == nullptr) {
     auto old_errno = errno;
 #if defined(_WIN32) && (defined(__MINGW32__) || defined(_MSC_VER))
@@ -77,5 +75,4 @@ size_t FileAdapter::read(uint64_t pos, void* buf, size_t n, const char* what)
 
 FileAdapter::~FileAdapter() = default;
 
-} // namespace serialize
-} // namespace caffe2
+} // namespace caffe2::serialize
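The RAIIFile hunk above moves the fopen call out of the constructor body and into the member initializer list, the kind of rewrite clang-tidy's cppcoreguidelines-prefer-member-initializer check suggests, alongside the switch to the single caffe2::serialize namespace. A small sketch of the constructor pattern, using a hypothetical wrapper type rather than the real class:

    #include <cstdio>
    #include <string>

    struct RaiiFileSketch {
      // Before: the body assigned fp_, leaving it briefly uninitialized:
      //   explicit RaiiFileSketch(const std::string& name) { fp_ = fopen(name.c_str(), "rb"); }
      // After: the member is initialized directly in the initializer list.
      explicit RaiiFileSketch(const std::string& name)
          : fp_(fopen(name.c_str(), "rb")) {}

      ~RaiiFileSketch() {
        if (fp_ != nullptr) {
          fclose(fp_);
        }
      }

      FILE* fp_{nullptr};
    };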
@@ -1,14 +1,11 @@
 #pragma once
 
-#include <fstream>
 #include <memory>
+#include <c10/macros/Macros.h>
 #include <string>
 
-#include "caffe2/serialize/istream_adapter.h"
 #include "caffe2/serialize/read_adapter_interface.h"
 
-namespace caffe2 {
-namespace serialize {
+namespace caffe2::serialize {
 
 class TORCH_API FileAdapter final : public ReadAdapterInterface {
  public:
@@ -32,5 +29,4 @@ class TORCH_API FileAdapter final : public ReadAdapterInterface {
   uint64_t size_;
 };
 
-} // namespace serialize
-} // namespace caffe2
+} // namespace caffe2::serialize
@@ -1,10 +1,9 @@
 #pragma once
-#include <cstring>
 #include <caffe2/serialize/read_adapter_interface.h>
 #include <sys/types.h>
+#include <cstring>
 
-namespace caffe2 {
-namespace serialize {
+namespace caffe2::serialize {
 
 class MemoryReadAdapter final : public caffe2::serialize::ReadAdapterInterface {
  public:
@@ -15,18 +14,18 @@ class MemoryReadAdapter final : public caffe2::serialize::ReadAdapterInterface {
     return size_;
   }
 
-  size_t read(uint64_t pos, void* buf, size_t n, const char* what = "")
-      const override {
-    (void) what;
+  size_t read(
+      uint64_t pos,
+      void* buf,
+      size_t n,
+      const char* what [[maybe_unused]] = "") const override {
     memcpy(buf, (int8_t*)(data_) + pos, n);
     return n;
   }
 
  private:
   const void* data_;
-  off_t size_;
+  off_t size_{};
 };
 
-} // namespace serialize
-} // namespace caffe2
+} // namespace caffe2::serialize
@@ -10,15 +10,11 @@
 #include <sys/types.h>
-#include <thread>
 
 #include <c10/core/Allocator.h>
 #include <c10/core/Backend.h>
 #include <c10/core/CPUAllocator.h>
-#include <c10/core/Backend.h>
 #include <c10/util/Exception.h>
 #include <c10/util/Logging.h>
 #include <c10/util/hash.h>
 
-#include "caffe2/core/common.h"
 #include "caffe2/serialize/file_adapter.h"
 #include "caffe2/serialize/inline_container.h"
 #include "caffe2/serialize/istream_adapter.h"
@@ -27,8 +23,8 @@
 #include "caffe2/serialize/versions.h"
 #include "miniz.h"
 
-namespace caffe2 {
-namespace serialize {
-
+namespace caffe2::serialize {
+
 constexpr c10::string_view kDebugPklSuffix(".debug_pkl");
 
 struct MzZipReaderIterWrapper {
@@ -194,8 +190,7 @@ void PyTorchStreamReader::init() {
 
   // version check
   at::DataPtr version_ptr;
-  // NOLINTNEXTLINE(cppcoreguidelines-init-variables)
-  size_t version_size;
+  size_t version_size = 0;
   if (hasRecord(".data/version")) {
     std::tie(version_ptr, version_size) = getRecord(".data/version");
   } else {
@@ -204,7 +199,7 @@ void PyTorchStreamReader::init() {
   }
   std::string version(static_cast<const char*>(version_ptr.get()), version_size);
   try {
-    version_ = std::stoull(version);
+    version_ = std::stoll(version);
   } catch (const std::invalid_argument& e) {
     CAFFE_THROW("Couldn't parse the version ",
                 version,
@@ -627,7 +622,7 @@ PyTorchStreamWriter::PyTorchStreamWriter(const std::string& file_name)
 }
 
 PyTorchStreamWriter::PyTorchStreamWriter(
-    const std::function<size_t(const void*, size_t)> writer_func)
+    const std::function<size_t(const void*, size_t)>& writer_func)
     : archive_name_("archive"),
       writer_func_(writer_func) {
   setup(archive_name_);
@@ -638,7 +633,7 @@ void PyTorchStreamWriter::setup(const string& file_name) {
   memset(ar_.get(), 0, sizeof(mz_zip_archive));
   archive_name_plus_slash_ = archive_name_ + "/"; // for writeRecord().
 
-  if (archive_name_.size() == 0) {
+  if (archive_name_.empty()) {
     CAFFE_THROW("invalid file name: ", file_name);
   }
   if (!writer_func_) {
@@ -649,7 +644,7 @@ void PyTorchStreamWriter::setup(const string& file_name) {
 
   const std::string dir_name = parentdir(file_name);
   if(!dir_name.empty()) {
-    struct stat st;
+    struct stat st{};
     bool dir_exists = (stat(dir_name.c_str(), &st) == 0 && (st.st_mode & S_IFDIR));
     TORCH_CHECK(dir_exists, "Parent directory ", dir_name, " does not exist.");
   }
@@ -706,8 +701,8 @@ void PyTorchStreamWriter::writeRecord(
       /*uncomp_size=*/0,
       /*uncomp_crc32=*/0,
       /*last_modified=*/nullptr,
-      /*user_extra_data=*/padding_.c_str(),
-      /*user_extra_data_len=*/padding_size,
+      /*user_extra_data_local=*/padding_.c_str(),
+      /*user_extra_data_local_len=*/padding_size,
       /*user_extra_data_central=*/nullptr,
       /*user_extra_data_central_len=*/0);
   valid("writing file ", name.c_str());
@@ -820,5 +815,4 @@ PyTorchStreamWriter::~PyTorchStreamWriter() {
   }
 }
 
-} // namespace serialize
-} // namespace caffe2
+} // namespace caffe2::serialize
@@ -6,7 +6,6 @@
 #include <fstream>
 #include <istream>
 #include <mutex>
 #include <ostream>
 #include <unordered_set>
 
 #include <c10/core/Allocator.h>
@@ -91,8 +90,8 @@ typedef struct mz_zip_archive mz_zip_archive;
 // model.json as the last file when writing after we have accumulated all
 // other information.
 
-namespace caffe2 {
-namespace serialize {
-
+namespace caffe2::serialize {
+
 static constexpr const char* kSerializationIdRecordName = ".data/serialization_id";
 
@@ -196,18 +195,18 @@ class TORCH_API PyTorchStreamReader final {
   std::string archive_name_;
   std::string archive_name_plus_slash_;
   std::shared_ptr<ReadAdapterInterface> in_;
-  int64_t version_;
+  int64_t version_{};
   std::mutex reader_lock_;
   bool load_debug_symbol_ = true;
   std::string serialization_id_;
-  size_t additional_reader_size_threshold_;
+  size_t additional_reader_size_threshold_{};
 };
 
 class TORCH_API PyTorchStreamWriter final {
  public:
   explicit PyTorchStreamWriter(const std::string& archive_name);
   explicit PyTorchStreamWriter(
-      const std::function<size_t(const void*, size_t)> writer_func);
+      const std::function<size_t(const void*, size_t)>& writer_func);
 
   void setMinVersion(const uint64_t version);
 
@@ -274,5 +273,4 @@ size_t getPadding(
     std::string& padding_buf);
 } // namespace detail
 
-} // namespace serialize
-} // namespace caffe2
+} // namespace caffe2::serialize
@@ -5,12 +5,14 @@
 #include <gtest/gtest.h>
 
-#include "caffe2/serialize/inline_container.h"
 #include <c10/util/Logging.h>
-#include "c10/util/irange.h"
+#include <c10/util/irange.h>
+#include "caffe2/serialize/inline_container.h"
 #include "caffe2/serialize/istream_adapter.h"
 
-namespace caffe2 {
-namespace serialize {
-
+// NOLINTBEGIN(*-narrowing-conversions)
+namespace caffe2::serialize {
+namespace {
 
 TEST(PyTorchStreamWriterAndReader, SaveAndLoad) {
@@ -19,7 +21,7 @@ TEST(PyTorchStreamWriterAndReader, SaveAndLoad) {
   std::ostringstream oss;
   // write records through writers
   PyTorchStreamWriter writer([&](const void* b, size_t n) -> size_t {
-    oss.write(static_cast<const char*>(b), n);
+    oss.write(static_cast<const char*>(b), static_cast<std::streamsize>(n));
     return oss ? n : 0;
   });
   // NOLINTNEXTLINE(cppcoreguidelines-pro-type-member-init,cppcoreguidelines-avoid-magic-numbers)
@@ -28,14 +30,14 @@ TEST(PyTorchStreamWriterAndReader, SaveAndLoad) {
   std::vector<uint8_t> buf(data1.size());
 
   for (auto i : c10::irange(data1.size())) {
-    data1[i] = data1.size() - i;
+    data1[i] = static_cast<char>(data1.size() - i);
   }
   writer.writeRecord("key1", data1.data(), data1.size());
 
   // NOLINTNEXTLINE(cppcoreguidelines-pro-type-member-init,cppcoreguidelines-avoid-magic-numbers)
   std::array<char, 64> data2;
   for (auto i : c10::irange(data2.size())) {
-    data2[i] = data2.size() - i;
+    data2[i] = static_cast<char>(data2.size() - i);
   }
   writer.writeRecord("key2", data2.data(), data2.size());
 
@@ -149,7 +151,7 @@ TEST(PyTorchStreamWriterAndReader, LoadWithMultiThreads) {
   PyTorchStreamReader reader(&iss);
   reader.setAdditionalReaderSizeThreshold(0);
   // before testing, sanity check
-  int64_t size1, size2, ret;
+  int64_t size1 = 0, size2 = 0, ret = 0;
   at::DataPtr data_ptr;
   std::tie(data_ptr, size1) = reader.getRecord("key1");
   std::tie(data_ptr, size2) = reader.getRecord("key2");
@@ -296,7 +298,7 @@ TEST(PytorchStreamWriterAndReader, SkipDebugRecords) {
   reader.setShouldLoadDebugSymbol(false);
   EXPECT_FALSE(reader.hasRecord("key1.debug_pkl"));
   at::DataPtr ptr;
-  size_t size;
+  size_t size = 0;
   std::tie(ptr, size) = reader.getRecord("key1.debug_pkl");
   EXPECT_EQ(size, 0);
   std::vector<uint8_t> dst(data1.size());
@@ -479,5 +481,5 @@ TEST_P(ChunkRecordIteratorTest, ChunkRead) {
 }
 
 } // namespace
-} // namespace serialize
-} // namespace caffe2
+} // namespace caffe2::serialize
+// NOLINTEND(*-narrowing-conversions)
@@ -1,8 +1,7 @@
 #include "caffe2/serialize/istream_adapter.h"
 #include <c10/util/Exception.h>
 
-namespace caffe2 {
-namespace serialize {
+namespace caffe2::serialize {
 
 IStreamAdapter::IStreamAdapter(std::istream* istream) : istream_(istream) {}
 
@@ -33,8 +32,6 @@ void IStreamAdapter::validate(const char* what) const {
   }
 }
 
-// NOLINTNEXTLINE(modernize-use-equals-default)
-IStreamAdapter::~IStreamAdapter() {}
+IStreamAdapter::~IStreamAdapter() = default;
 
-} // namespace serialize
-} // namespace caffe2
+} // namespace caffe2::serialize
@@ -5,8 +5,7 @@
 #include "c10/macros/Macros.h"
 #include "caffe2/serialize/read_adapter_interface.h"
 
-namespace caffe2 {
-namespace serialize {
+namespace caffe2::serialize {
 
 // this is a reader implemented by std::istream
 class TORCH_API IStreamAdapter final : public ReadAdapterInterface {
@@ -23,5 +22,4 @@ class TORCH_API IStreamAdapter final : public ReadAdapterInterface {
   void validate(const char* what) const;
 };
 
-} // namespace serialize
-} // namespace caffe2
+} // namespace caffe2::serialize
@@ -1,10 +0,0 @@
-#include "caffe2/serialize/read_adapter_interface.h"
-
-namespace caffe2 {
-namespace serialize {
-
-// NOLINTNEXTLINE(modernize-use-equals-default)
-ReadAdapterInterface::~ReadAdapterInterface() {}
-
-} // namespace serialize
-} // namespace caffe2
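The file deleted above (read_adapter_interface.cc) existed only to hold an out-of-line empty destructor behind a NOLINT. Once the destructor is declared `= default` in the header, as the next hunk does, the translation unit and its entries in the build files earlier in the diff become unnecessary. A minimal sketch of the same pattern, using a hypothetical interface name:

    #include <cstddef>
    #include <cstdint>

    // Before: the header declared `virtual ~IfaceSketch();` and a separate .cc
    // defined `IfaceSketch::~IfaceSketch() {}`. After: the defaulted declaration
    // lives in the header and the .cc file can be removed.
    class IfaceSketch {
     public:
      virtual size_t size() const = 0;
      virtual size_t read(uint64_t pos, void* buf, size_t n) const = 0;
      virtual ~IfaceSketch() = default;
    };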
@@ -3,21 +3,19 @@
 #include <cstddef>
 #include <cstdint>
 
-#include "c10/macros/Macros.h"
+#include <c10/macros/Export.h>
 
-namespace caffe2 {
-namespace serialize {
+namespace caffe2::serialize {
 
 // this is the interface for the (file/stream/memory) reader in
-// PyTorchStreamReader. with this interface, we can extend the support
+// PyTorchStreamReader. With this interface, we can extend the support
 // besides standard istream
 class TORCH_API ReadAdapterInterface {
  public:
   virtual size_t size() const = 0;
   virtual size_t read(uint64_t pos, void* buf, size_t n, const char* what = "")
       const = 0;
-  virtual ~ReadAdapterInterface();
+  virtual ~ReadAdapterInterface() = default;
 };
 
-} // namespace serialize
-} // namespace caffe2
+} // namespace caffe2::serialize
@@ -1,8 +1,7 @@
 #pragma once
 #include <cstdint>
 
-namespace caffe2 {
-namespace serialize {
+namespace caffe2::serialize {
 
 constexpr uint64_t kMinSupportedFileFormatVersion = 0x1L;
 
@@ -129,5 +128,4 @@ constexpr uint64_t kProducedBytecodeVersion = 0x8L;
 constexpr uint64_t kMinSupportedBytecodeVersion = 0x4L;
 constexpr uint64_t kMaxSupportedBytecodeVersion = 0x9L;
 
-} // namespace serialize
-} // namespace caffe2
+} // namespace caffe2::serialize
@@ -51,7 +51,7 @@ size_t editDistance(
     (c)=(uint8_t)(s)[(i)++]; \
 }
 
-int32_t editDistanceHelper(const char* s1,
+size_t editDistanceHelper(const char* s1,
     size_t s1_len,
     const char* s2,
     size_t s2_len,
@@ -39,7 +39,7 @@ TORCH_API inline bool EndsWith(
   }
 }
 
-TORCH_API int32_t editDistanceHelper(
+TORCH_API size_t editDistanceHelper(
     const char* s1,
     size_t s1_len,
     const char* s2,
@@ -1,15 +1,11 @@
 #ifndef CAFFE2_UTILS_THREADPOOL_H_
 #define CAFFE2_UTILS_THREADPOOL_H_
 
 #include "ThreadPoolCommon.h"
 
 #include <atomic>
 #include <functional>
 #include <memory>
 #include <mutex>
 #include <vector>
 
-#include "caffe2/core/common.h"
+#include <c10/macros/Macros.h>
 
 //
 // A work-stealing threadpool loosely based off of pthreadpool
@@ -1,9 +1,9 @@
 /* Standard C headers */
-#include <stdint.h>
-#include <stdbool.h>
-#include <stdlib.h>
-#include <string.h>
-#include <assert.h>
+#include <cstdint>
+#include <cstdlib>
+#include <cstring>
+#include <cassert>
+#include <limits>
 
 #ifdef _MSC_VER
@@ -71,8 +71,8 @@ void legacy_pthreadpool_compute_1d_tiled(
 }
 
 struct compute_2d_context {
-  legacy_pthreadpool_function_2d_t function;
-  void* argument;
+  legacy_pthreadpool_function_2d_t function{};
+  void* argument{};
   caffe2::FixedDivisor<int32_t> range_j;
 };
 
@@ -80,8 +80,8 @@ static void compute_2d(void* context_, size_t linear_index) {
   TORCH_DCHECK_LE(linear_index, std::numeric_limits<int32_t>::max());
 
   const struct compute_2d_context* context = static_cast<compute_2d_context*>(context_);
-  int32_t q;
-  int32_t r;
+  int32_t q = 0;
+  int32_t r = 0;
   context->range_j.DivMod(static_cast<int32_t>(linear_index), &q, &r);
   context->function(context->argument, q, r);
 }
@@ -112,18 +112,18 @@ void legacy_pthreadpool_compute_2d(
 }
 
 struct compute_2d_tiled_context {
-  legacy_pthreadpool_function_2d_tiled_t function;
-  void* argument;
+  legacy_pthreadpool_function_2d_tiled_t function{};
+  void* argument{};
   caffe2::FixedDivisor<int32_t> tile_range_j;
-  size_t range_i;
-  size_t range_j;
-  size_t tile_i;
-  size_t tile_j;
+  size_t range_i{};
+  size_t range_j{};
+  size_t tile_i{};
+  size_t tile_j{};
 };
 
 static void compute_2d_tiled(void* context_, size_t linear_index) {
-  int32_t q;
-  int32_t r;
+  int32_t q = 0;
+  int32_t r = 0;
 
   const struct compute_2d_tiled_context* context = static_cast<compute_2d_tiled_context*>(context_);
   context->tile_range_j.DivMod(linear_index, &q, &r);
@@ -172,26 +172,26 @@ void legacy_pthreadpool_compute_2d_tiled(
 }
 
 struct compute_3d_tiled_context {
-  legacy_pthreadpool_function_3d_tiled_t function;
-  void* argument;
+  legacy_pthreadpool_function_3d_tiled_t function{};
+  void* argument{};
   caffe2::FixedDivisor<int32_t> tile_range_j;
   caffe2::FixedDivisor<int32_t> tile_range_k;
-  size_t range_i;
-  size_t range_j;
-  size_t range_k;
-  size_t tile_i;
-  size_t tile_j;
-  size_t tile_k;
+  size_t range_i{};
+  size_t range_j{};
+  size_t range_k{};
+  size_t tile_i{};
+  size_t tile_j{};
+  size_t tile_k{};
 };
 
 static void compute_3d_tiled(
     void* context_,
     size_t linear_index) {
-  int32_t tile_index_ij, tile_index_k;
+  int32_t tile_index_ij = 0, tile_index_k = 0;
   const struct compute_3d_tiled_context* context = static_cast<compute_3d_tiled_context*>(context_);
   context->tile_range_k.DivMod(
       static_cast<int32_t>(linear_index), &tile_index_ij, &tile_index_k);
-  int32_t tile_index_i, tile_index_j;
+  int32_t tile_index_i = 0, tile_index_j = 0;
   context->tile_range_j.DivMod(tile_index_ij, &tile_index_i, &tile_index_j);
   const size_t max_tile_i = context->tile_i;
   const size_t max_tile_j = context->tile_j;
@@ -261,31 +261,31 @@ void legacy_pthreadpool_compute_3d_tiled(
 }
 
 struct compute_4d_tiled_context {
-  legacy_pthreadpool_function_4d_tiled_t function;
-  void* argument;
+  legacy_pthreadpool_function_4d_tiled_t function{};
+  void* argument{};
   caffe2::FixedDivisor<int32_t> tile_range_kl;
   caffe2::FixedDivisor<int32_t> tile_range_j;
   caffe2::FixedDivisor<int32_t> tile_range_l;
-  size_t range_i;
-  size_t range_j;
-  size_t range_k;
-  size_t range_l;
-  size_t tile_i;
-  size_t tile_j;
-  size_t tile_k;
-  size_t tile_l;
+  size_t range_i{};
+  size_t range_j{};
+  size_t range_k{};
+  size_t range_l{};
+  size_t tile_i{};
+  size_t tile_j{};
+  size_t tile_k{};
+  size_t tile_l{};
 };
 
 static void compute_4d_tiled(
     void* context_,
     size_t linear_index) {
-  int32_t tile_index_ij, tile_index_kl;
+  int32_t tile_index_ij = 0, tile_index_kl = 0;
   const struct compute_4d_tiled_context* context = static_cast<compute_4d_tiled_context*>(context_);
   context->tile_range_kl.DivMod(
       static_cast<int32_t>(linear_index), &tile_index_ij, &tile_index_kl);
-  int32_t tile_index_i, tile_index_j;
+  int32_t tile_index_i = 0, tile_index_j = 0;
   context->tile_range_j.DivMod(tile_index_ij, &tile_index_i, &tile_index_j);
-  int32_t tile_index_k, tile_index_l;
+  int32_t tile_index_k = 0, tile_index_l = 0;
   context->tile_range_l.DivMod(tile_index_kl, &tile_index_k, &tile_index_l);
   const size_t max_tile_i = context->tile_i;
   const size_t max_tile_j = context->tile_j;
@@ -7,9 +7,7 @@ namespace caffe2 {
 namespace {
 static thread_local bool using_new_threadpool{false};
 }
-WithCastToNewThreadPool::WithCastToNewThreadPool(bool use_new_threadpool) {
-  use_new_threadpool_ = using_new_threadpool;
+WithCastToNewThreadPool::WithCastToNewThreadPool(bool use_new_threadpool) : use_new_threadpool_(using_new_threadpool) {
   using_new_threadpool = use_new_threadpool;
 }
 WithCastToNewThreadPool::~WithCastToNewThreadPool() {
   using_new_threadpool = use_new_threadpool_;
@@ -1,5 +1,6 @@
 #include <gtest/gtest.h>
 
+#include <caffe2/serialize/istream_adapter.h>
 #include <test/cpp/jit/test_custom_class_registrations.h>
 #include <torch/csrc/jit/passes/freeze_module.h>
 #include <torch/custom_class.h>
@@ -2,6 +2,7 @@
 #include <c10/util/Exception.h>
 #include <caffe2/serialize/file_adapter.h>
 #include <caffe2/serialize/inline_container.h>
+#include <caffe2/serialize/istream_adapter.h>
 #include <torch/csrc/jit/mobile/compatibility/backport_manager.h>
 #include <torch/csrc/jit/mobile/compatibility/model_compatibility.h>
 #include <torch/csrc/jit/mobile/import.h>
@@ -1,6 +1,7 @@
 #include <ATen/core/ivalue.h>
 #include <caffe2/serialize/file_adapter.h>
 #include <caffe2/serialize/inline_container.h>
+#include <caffe2/serialize/istream_adapter.h>
 #include <torch/csrc/jit/api/compilation_unit.h> // removed after using simple type_resolver/obj_loader
 #include <torch/csrc/jit/mobile/compatibility/model_compatibility.h>
 #include <torch/csrc/jit/mobile/file_format.h>