[BE][clang-format] make macro PyObject_HEAD have its own line (#136945)

Pull Request resolved: https://github.com/pytorch/pytorch/pull/136945
Approved by: https://github.com/albanD
Author: Xuehai Pan
Date: 2024-10-02 23:26:51 +08:00
Committed by: PyTorch MergeBot
parent 54f50f19eb
commit 89c37be6b7

17 changed files with 51 additions and 24 deletions
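
In short: the two .clang-format changes below add PyObject_HEAD, PyObject_VAR_HEAD, and PyException_HEAD to StatementMacros, so clang-format treats each macro as a complete statement and stops merging the following member declaration onto the macro's line (or indenting it as a continuation); the Standard key also moves from the older Cpp11 spelling to c++17, and ForEachMacros switches to block list style. A minimal sketch of the formatting effect, using a hypothetical Example struct rather than code from this patch:

#include <Python.h>

// Before this change, clang-format folded the macro and the next declaration
// into a single statement, producing lines like:
//   struct Example {
//     PyObject_HEAD int64_t payload;
//   };

// With PyObject_HEAD listed under StatementMacros, the macro keeps its own
// line and the member that follows is indented as an ordinary declaration:
struct Example {
  PyObject_HEAD
  int64_t payload;
};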

@@ -44,7 +44,9 @@ ContinuationIndentWidth: 4
 Cpp11BracedListStyle: true
 DerivePointerAlignment: false
 DisableFormat: false
-ForEachMacros: [ FOR_EACH_RANGE, FOR_EACH, ]
+ForEachMacros:
+  - FOR_EACH_RANGE
+  - FOR_EACH
 IncludeCategories:
   - Regex: '^<.*\.h(pp)?>'
     Priority: 1
@@ -79,7 +81,11 @@ SpacesInContainerLiterals: true
 SpacesInCStyleCastParentheses: false
 SpacesInParentheses: false
 SpacesInSquareBrackets: false
-Standard: Cpp11
+Standard: c++17
+StatementMacros:
+  - PyObject_HEAD
+  - PyObject_VAR_HEAD
+  - PyException_HEAD
 TabWidth: 8
 UseTab: Never
 ---

@@ -43,7 +43,9 @@ ContinuationIndentWidth: 4
 Cpp11BracedListStyle: true
 DerivePointerAlignment: false
 DisableFormat: false
-ForEachMacros: [ FOR_EACH_RANGE, FOR_EACH, ]
+ForEachMacros:
+  - FOR_EACH_RANGE
+  - FOR_EACH
 IncludeCategories:
   - Regex: '^<.*\.h(pp)?>'
     Priority: 1
@@ -81,7 +83,11 @@ SpacesInContainerLiterals: true
 SpacesInCStyleCastParentheses: false
 SpacesInParentheses: false
 SpacesInSquareBrackets: false
-Standard: Cpp11
+Standard: c++17
+StatementMacros:
+  - PyObject_HEAD
+  - PyObject_VAR_HEAD
+  - PyException_HEAD
 TabWidth: 8
 UseTab: Never
 ...

@@ -7,7 +7,8 @@
 // NOLINTNEXTLINE(cppcoreguidelines-pro-type-member-init)
 struct TORCH_API THPDevice {
-  PyObject_HEAD at::Device device;
+  PyObject_HEAD
+  at::Device device;
 };
 TORCH_API extern PyTypeObject THPDeviceType;

@@ -7,7 +7,8 @@
 constexpr int DTYPE_NAME_LEN = 64;
 struct TORCH_API THPDtype {
-  PyObject_HEAD at::ScalarType scalar_type;
+  PyObject_HEAD
+  at::ScalarType scalar_type;
   // NOLINTNEXTLINE(cppcoreguidelines-avoid-c-arrays,modernize-avoid-c-arrays)
   char name[DTYPE_NAME_LEN + 1];
 };

@@ -5,7 +5,8 @@
 #include <torch/csrc/python_headers.h>
 struct TORCH_API THPEvent {
-  PyObject_HEAD c10::Event event;
+  PyObject_HEAD
+  c10::Event event;
 };
 TORCH_API extern PyTypeObject* THPEventClass;
 TORCH_API extern PyTypeObject THPEventType;

@@ -6,7 +6,8 @@
 // NOLINTNEXTLINE(cppcoreguidelines-pro-type-member-init)
 struct THPGenerator {
-  PyObject_HEAD at::Generator cdata;
+  PyObject_HEAD
+  at::Generator cdata;
 };
 // Creates a new Python object wrapping the default at::Generator. The reference

@@ -9,7 +9,8 @@
 const int LAYOUT_NAME_LEN = 64;
 struct THPLayout {
-  PyObject_HEAD at::Layout layout;
+  PyObject_HEAD
+  at::Layout layout;
   // NOLINTNEXTLINE(cppcoreguidelines-avoid-c-arrays,modernize-avoid-c-arrays)
   char name[LAYOUT_NAME_LEN + 1];
 };

@@ -9,7 +9,8 @@
 const int MEMORY_FORMAT_NAME_LEN = 64;
 struct THPMemoryFormat {
-  PyObject_HEAD at::MemoryFormat memory_format;
+  PyObject_HEAD
+  at::MemoryFormat memory_format;
   // NOLINTNEXTLINE(cppcoreguidelines-avoid-c-arrays,modernize-avoid-c-arrays)
   char name[MEMORY_FORMAT_NAME_LEN + 1];
 };

@@ -9,7 +9,8 @@
 constexpr int QSCHEME_NAME_LEN = 64;
 struct THPQScheme {
-  PyObject_HEAD at::QScheme qscheme;
+  PyObject_HEAD
+  at::QScheme qscheme;
   // NOLINTNEXTLINE(cppcoreguidelines-avoid-c-arrays,modernize-avoid-c-arrays)
   char name[QSCHEME_NAME_LEN + 1];
 };

@@ -6,7 +6,8 @@
 #include <torch/csrc/python_headers.h>
 struct THPStream {
-  PyObject_HEAD int64_t stream_id;
+  PyObject_HEAD
+  int64_t stream_id;
   int64_t device_type;
   int64_t device_index;
 };

@@ -5,7 +5,8 @@
 #include <ATen/ATen.h>
 struct THPDTypeInfo {
-  PyObject_HEAD at::ScalarType type;
+  PyObject_HEAD
+  at::ScalarType type;
 };
 struct THPFInfo : THPDTypeInfo {};

@@ -11,7 +11,8 @@
 namespace torch::autograd {
 struct THPCppFunction {
-  PyObject_HEAD std::shared_ptr<Node> cdata;
+  PyObject_HEAD
+  std::shared_ptr<Node> cdata;
 };
 template <typename Ctor>

@@ -95,7 +95,7 @@ inline bool ensure_tuple(THPObjectPtr& obj) {
 struct THPFunction {
   PyObject_HEAD
-      PyObject* needs_input_grad;
+  PyObject* needs_input_grad;
   // Python tuple of tensors whose variables we should save. Set
   // by Python with 'save_for_backward'. If nullptr, no tensors were

@@ -46,7 +46,8 @@
 // Manually create _PyTupleIterObject struct
 typedef struct {
-  PyObject_HEAD Py_ssize_t it_index;
+  PyObject_HEAD
+  Py_ssize_t it_index;
   PyTupleObject* it_seq; /* Set to NULL when iterator is exhausted */
 } _PyTupleIterObject;

@@ -8,7 +8,8 @@
 ///////////////////////////////
 struct NodeBase {
-  PyObject_HEAD bool _erased;
+  PyObject_HEAD
+  bool _erased;
   NodeBase* _prev;
   NodeBase* _next;
 };
@@ -111,7 +112,8 @@ bool NodeBase_init(PyObject* module) {
 ////////////////////////////////
 struct NodeIter {
-  PyObject_HEAD bool _reversed;
+  PyObject_HEAD
+  bool _reversed;
   NodeBase* _root;
   NodeBase* _cur;
 };

@@ -12,7 +12,8 @@
 #include <torch/csrc/utils/pybind.h>
 struct THPCapturedTraceback {
-  PyObject_HEAD std::shared_ptr<torch::CapturedTraceback> data;
+  PyObject_HEAD
+  std::shared_ptr<torch::CapturedTraceback> data;
 };
 static int THPCapturedTraceback_traverse(
@@ -136,7 +137,8 @@ namespace torch::profiler {
 namespace {
 struct RecordFunctionFast {
-  PyObject_HEAD PyObject* name;
+  PyObject_HEAD
+  PyObject* name;
   PyObject* input_values;
   PyObject* keyword_values;
   std::unique_ptr<at::RecordFunction> guard;

@@ -34,8 +34,8 @@ void set_disabled_torch_dispatch_impl(PyObject* value) {
 typedef struct {
   PyObject_HEAD
-      /* Type-specific fields go here. */
-      at::impl::TorchFunctionDisabledState old_state;
+  /* Type-specific fields go here. */
+  at::impl::TorchFunctionDisabledState old_state;
 } DisableTorchFunctionSubclass;
 PyObject* DisableTorchFunctionSubclass__enter(
@@ -131,8 +131,8 @@ PyObject* THPModule_DisableTorchFunctionSubclassType() {
 typedef struct {
   PyObject_HEAD
-      /* Type-specific fields go here. */
-      at::impl::TorchFunctionDisabledState old_state;
+  /* Type-specific fields go here. */
+  at::impl::TorchFunctionDisabledState old_state;
 } DisableTorchFunction;
 PyObject* DisableTorchFunction__enter(PyObject* self, PyObject* unused) {