mirror of
https://github.com/pytorch/pytorch.git
synced 2025-11-11 22:34:53 +08:00
[ONNX] Pretty print diagnostic logging (#88261)
Adds pretty print diagnostic logging. For example
```python
import io
import torch
from torch.onnx._internal import diagnostics
class CustomAdd(torch.autograd.Function):
@staticmethod
def forward(ctx, x, y):
return x + y
@staticmethod
def symbolic(g, x, y):
return g.op("custom::CustomAdd", x, y)
class M(torch.nn.Module):
def forward(self, x):
return CustomAdd.apply(x, x)
# trigger warning for missing shape inference.
# rule = diagnostics.rules.node_missing_onnx_shape_inference
torch.onnx.export(M(), torch.randn(3, 4), io.BytesIO())
```
By default, a minimal summary of the diagnostics is shown:
```
========= Diagnostic Run torch.onnx.export version 1.14.0a0+git90a69c5 =========
verbose: False, log level: Level.ERROR
======================= 0 NONE 0 NOTE 3 WARNING 0 ERROR ========================
3 WARNING were not printed due to the log level.
```
Adjust the `verbose` and `level` arguments:
```python
diagnostics.engine.pretty_print(verbose=True, level=diagnostics.levels.WARNING)
```
This prints the full log:
```
=============================== 1 Diagnostic Run ===============================
========= Diagnostic Run torch.onnx.export version 1.14.0a0+git90a69c5 =========
verbose: True, log level: Level.WARNING
======================= 0 NONE 0 NOTE 3 WARNING 0 ERROR ========================
WARNING: node-missing-onnx-shape-inference
==========================================
The shape inference of custom::CustomAdd type is missing, so it may result in wrong shape inference for the exported graph. Please consider adding it in symbolic function.
--------------------------- Stack: Python call stack ---------------------------
frame: diagnostic = ExportDiagnostic(rule, level, message, **kwargs) /home/bowbao/pytorch_dev/torch/onnx/_internal/diagnostics/_diagnostic.py:151
frame: n, utils._params_dict, GLOBALS.export_onnx_opset_version /home/bowbao/pytorch_dev/torch/onnx/_patch_torch.py:82
frame: <@beartype(torch.onnx._patch_torch._graph_op) at 0x7f62184b6710>:78
frame: return beartyped(*args, **kwargs) /home/bowbao/pytorch_dev/torch/onnx/_internal/_beartype.py:81
frame: return function(*args, **kwargs) /home/bowbao/pytorch_dev/torch/onnx/_deprecation.py:30
frame: return g.op("custom::CustomAdd", x, y) test_pretty_print.py:14
frame: return symbolic_fn(g, *args) /home/bowbao/pytorch_dev/torch/onnx/utils.py:1716
frame: return beartyped(*args, **kwargs) /home/bowbao/pytorch_dev/torch/onnx/_internal/_beartype.py:81
frame: graph = _C._jit_pass_onnx(graph, operator_export_type) /home/bowbao/pytorch_dev/torch/onnx/utils.py:663
frame: <@beartype(torch.onnx.utils._optimize_graph) at 0x7f62180e05f0>:85
frame: return beartyped(*args, **kwargs) /home/bowbao/pytorch_dev/torch/onnx/_internal/_beartype.py:81
frame: module=module, /home/bowbao/pytorch_dev/torch/onnx/utils.py:1123
frame: return beartyped(*args, **kwargs) /home/bowbao/pytorch_dev/torch/onnx/_internal/_beartype.py:81
frame: dynamic_axes=dynamic_axes, /home/bowbao/pytorch_dev/torch/onnx/utils.py:1539
frame: return beartyped(*args, **kwargs) /home/bowbao/pytorch_dev/torch/onnx/_internal/_beartype.py:81
frame: export_modules_as_functions=export_modules_as_functions, /home/bowbao/pytorch_dev/torch/onnx/utils.py:519
frame: <@beartype(torch.onnx.utils.export) at 0x7f62180e0170>:347
frame: return beartyped(*args, **kwargs) /home/bowbao/pytorch_dev/torch/onnx/_internal/_beartype.py:81
frame: torch.onnx.export(M(), torch.randn(3, 4), io.BytesIO()) test_pretty_print.py:22
---------------------------- Stack: C++ call stack -----------------------------
frame: (<unknown frame>)
frame: (<unknown function> + 0x88411b (0x7f625b36011b in /home/bowbao/pytorch_dev/torch/lib/libtorch_python.so))
frame: (torch::jit::UpdateReliable(torch::jit::Value*, std::pair<bool, bool> const&) + 0x7d3 (0x7f625b351743 in /home/bowbao/pytorch_dev/torch/lib/libtorch_python.so))
frame: (torch::jit::UpdateReliable(torch::jit::Node*) + 0x4f (0x7f625b35198f in /home/bowbao/pytorch_dev/torch/lib/libtorch_python.so))
frame: (torch::jit::ONNXShapeTypeInference(torch::jit::Node*, std::map<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >, c10::IValue, std::less<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > >, std::allocator<std::pair<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > const, c10::IValue> > > const&, int) + 0xac9 (0x7f625b357179 in /home/bowbao/pytorch_dev/torch/lib/libtorch_python.so))
frame: (<unknown function> + 0xabd026 (0x7f625b599026 in /home/bowbao/pytorch_dev/torch/lib/libtorch_python.so))
frame: (<unknown function> + 0x3c0fda (0x7f625ae9cfda in /home/bowbao/pytorch_dev/torch/lib/libtorch_python.so))
frame: (<unknown frame>)
WARNING: node-missing-onnx-shape-inference
==========================================
The shape inference of custom::CustomAdd type is missing, so it may result in wrong shape inference for the exported graph. Please consider adding it in symbolic function.
--------------------------- Stack: Python call stack ---------------------------
frame: diagnostic = ExportDiagnostic(rule, level, message, **kwargs) /home/bowbao/pytorch_dev/torch/onnx/_internal/diagnostics/_diagnostic.py:151
frame: graph, params_dict, GLOBALS.export_onnx_opset_version /home/bowbao/pytorch_dev/torch/onnx/utils.py:688
frame: <@beartype(torch.onnx.utils._optimize_graph) at 0x7f62180e05f0>:85
frame: return beartyped(*args, **kwargs) /home/bowbao/pytorch_dev/torch/onnx/_internal/_beartype.py:81
frame: module=module, /home/bowbao/pytorch_dev/torch/onnx/utils.py:1123
frame: return beartyped(*args, **kwargs) /home/bowbao/pytorch_dev/torch/onnx/_internal/_beartype.py:81
frame: dynamic_axes=dynamic_axes, /home/bowbao/pytorch_dev/torch/onnx/utils.py:1539
frame: return beartyped(*args, **kwargs) /home/bowbao/pytorch_dev/torch/onnx/_internal/_beartype.py:81
frame: export_modules_as_functions=export_modules_as_functions, /home/bowbao/pytorch_dev/torch/onnx/utils.py:519
frame: <@beartype(torch.onnx.utils.export) at 0x7f62180e0170>:347
frame: return beartyped(*args, **kwargs) /home/bowbao/pytorch_dev/torch/onnx/_internal/_beartype.py:81
frame: torch.onnx.export(M(), torch.randn(3, 4), io.BytesIO()) test_pretty_print.py:22
---------------------------- Stack: C++ call stack -----------------------------
frame: (<unknown frame>)
frame: (<unknown function> + 0x88411b (0x7f625b36011b in /home/bowbao/pytorch_dev/torch/lib/libtorch_python.so))
frame: (torch::jit::UpdateReliable(torch::jit::Value*, std::pair<bool, bool> const&) + 0x7d3 (0x7f625b351743 in /home/bowbao/pytorch_dev/torch/lib/libtorch_python.so))
frame: (torch::jit::UpdateReliable(torch::jit::Node*) + 0x4f (0x7f625b35198f in /home/bowbao/pytorch_dev/torch/lib/libtorch_python.so))
frame: (torch::jit::ONNXShapeTypeInference(torch::jit::Node*, std::map<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >, c10::IValue, std::less<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > >, std::allocator<std::pair<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > const, c10::IValue> > > const&, int) + 0xac9 (0x7f625b357179 in /home/bowbao/pytorch_dev/torch/lib/libtorch_python.so))
frame: (<unknown function> + 0x87d6d1 (0x7f625b3596d1 in /home/bowbao/pytorch_dev/torch/lib/libtorch_python.so))
frame: (torch::jit::ONNXShapeTypeInference(std::shared_ptr<torch::jit::Graph>&, std::map<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >, c10::IValue, std::less<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > >, std::allocator<std::pair<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > const, c10::IValue> > > const&, int) + 0x33 (0x7f625b359cf3 in /home/bowbao/pytorch_dev/torch/lib/libtorch_python.so))
frame: (<unknown function> + 0xabdbae (0x7f625b599bae in /home/bowbao/pytorch_dev/torch/lib/libtorch_python.so))
frame: (<unknown function> + 0x3c0fda (0x7f625ae9cfda in /home/bowbao/pytorch_dev/torch/lib/libtorch_python.so))
frame: (<unknown frame>)
WARNING: node-missing-onnx-shape-inference
==========================================
The shape inference of custom::CustomAdd type is missing, so it may result in wrong shape inference for the exported graph. Please consider adding it in symbolic function.
--------------------------- Stack: Python call stack ---------------------------
frame: diagnostic = ExportDiagnostic(rule, level, message, **kwargs) /home/bowbao/pytorch_dev/torch/onnx/_internal/diagnostics/_diagnostic.py:151
frame: graph, params_dict, GLOBALS.export_onnx_opset_version /home/bowbao/pytorch_dev/torch/onnx/utils.py:1179
frame: return beartyped(*args, **kwargs) /home/bowbao/pytorch_dev/torch/onnx/_internal/_beartype.py:81
frame: dynamic_axes=dynamic_axes, /home/bowbao/pytorch_dev/torch/onnx/utils.py:1539
frame: return beartyped(*args, **kwargs) /home/bowbao/pytorch_dev/torch/onnx/_internal/_beartype.py:81
frame: export_modules_as_functions=export_modules_as_functions, /home/bowbao/pytorch_dev/torch/onnx/utils.py:519
frame: <@beartype(torch.onnx.utils.export) at 0x7f62180e0170>:347
frame: return beartyped(*args, **kwargs) /home/bowbao/pytorch_dev/torch/onnx/_internal/_beartype.py:81
frame: torch.onnx.export(M(), torch.randn(3, 4), io.BytesIO()) test_pretty_print.py:22
---------------------------- Stack: C++ call stack -----------------------------
frame: (<unknown frame>)
frame: (<unknown function> + 0x88411b (0x7f625b36011b in /home/bowbao/pytorch_dev/torch/lib/libtorch_python.so))
frame: (torch::jit::UpdateReliable(torch::jit::Value*, std::pair<bool, bool> const&) + 0x7d3 (0x7f625b351743 in /home/bowbao/pytorch_dev/torch/lib/libtorch_python.so))
frame: (torch::jit::UpdateReliable(torch::jit::Node*) + 0x4f (0x7f625b35198f in /home/bowbao/pytorch_dev/torch/lib/libtorch_python.so))
frame: (torch::jit::ONNXShapeTypeInference(torch::jit::Node*, std::map<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >, c10::IValue, std::less<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > >, std::allocator<std::pair<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > const, c10::IValue> > > const&, int) + 0xac9 (0x7f625b357179 in /home/bowbao/pytorch_dev/torch/lib/libtorch_python.so))
frame: (<unknown function> + 0x87d6d1 (0x7f625b3596d1 in /home/bowbao/pytorch_dev/torch/lib/libtorch_python.so))
frame: (torch::jit::ONNXShapeTypeInference(std::shared_ptr<torch::jit::Graph>&, std::map<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >, c10::IValue, std::less<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > >, std::allocator<std::pair<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > const, c10::IValue> > > const&, int) + 0x33 (0x7f625b359cf3 in /home/bowbao/pytorch_dev/torch/lib/libtorch_python.so))
frame: (<unknown function> + 0xabdbae (0x7f625b599bae in /home/bowbao/pytorch_dev/torch/lib/libtorch_python.so))
frame: (<unknown function> + 0x3c0fda (0x7f625ae9cfda in /home/bowbao/pytorch_dev/torch/lib/libtorch_python.so))
frame: (<unknown frame>)
```
Pull Request resolved: https://github.com/pytorch/pytorch/pull/88261
Approved by: https://github.com/abock, https://github.com/justinchuby
This commit is contained in:
committed by
PyTorch MergeBot
parent
ea0ec9d71c
commit
fbc1878265
@ -19,7 +19,7 @@ def _assert_has_diagnostics(
|
||||
rule_level_pairs: AbstractSet[Tuple[infra.Rule, infra.Level]],
|
||||
):
|
||||
sarif_log = engine.sarif_log()
|
||||
unseen_pairs = {(rule.id, level.value) for rule, level in rule_level_pairs}
|
||||
unseen_pairs = {(rule.id, level.name.lower()) for rule, level in rule_level_pairs}
|
||||
actual_results = []
|
||||
for run in sarif_log.runs:
|
||||
if run.results is None:
|
||||
|
||||
@ -74,22 +74,6 @@ class ExportDiagnostic(infra.Diagnostic):
|
||||
self.with_stack(stack)
|
||||
self.cpp_call_stack = stack
|
||||
|
||||
def with_model_source_location(
|
||||
self: _ExportDiagnostic,
|
||||
) -> _ExportDiagnostic:
|
||||
# TODO: Implement this.
|
||||
# self.locations.append(...)
|
||||
raise NotImplementedError()
|
||||
return self
|
||||
|
||||
def with_export_source_location(
|
||||
self: _ExportDiagnostic,
|
||||
) -> _ExportDiagnostic:
|
||||
# TODO: Implement this.
|
||||
# self.locations.append(...)
|
||||
raise NotImplementedError()
|
||||
return self
|
||||
|
||||
|
||||
class ExportDiagnosticEngine(infra.DiagnosticEngine):
|
||||
"""PyTorch ONNX Export diagnostic engine.
|
||||
@ -115,7 +99,6 @@ class ExportDiagnosticEngine(infra.DiagnosticEngine):
|
||||
name="torch.onnx",
|
||||
version=torch.__version__,
|
||||
diagnostic_type=ExportDiagnostic,
|
||||
options=None,
|
||||
)
|
||||
|
||||
@property
|
||||
@ -150,6 +133,7 @@ def create_export_diagnostic_context():
|
||||
try:
|
||||
yield context
|
||||
finally:
|
||||
context.pretty_print(context.options.log_verbose, context.options.log_level)
|
||||
context = engine.background_context
|
||||
|
||||
|
||||
|
||||
@ -17,10 +17,10 @@ class Level(enum.Enum):
|
||||
please use infra.Tag instead.
|
||||
"""
|
||||
|
||||
NONE = "none"
|
||||
NOTE = "note"
|
||||
WARNING = "warning"
|
||||
ERROR = "error"
|
||||
NONE = enum.auto()
|
||||
NOTE = enum.auto()
|
||||
WARNING = enum.auto()
|
||||
ERROR = enum.auto()
|
||||
|
||||
|
||||
levels = Level
|
||||
@ -107,6 +107,9 @@ class Rule:
|
||||
"""
|
||||
return self.message_default_template.format(*args, **kwargs)
|
||||
|
||||
def pretty_print(self):
|
||||
pass
|
||||
|
||||
|
||||
@dataclasses.dataclass
|
||||
class Location:
|
||||
@ -134,6 +137,25 @@ class Location:
|
||||
else None,
|
||||
)
|
||||
|
||||
def pretty_print(self):
|
||||
"""Prints the location in a human-readable format."""
|
||||
location_strs = ["frame:"]
|
||||
if self.snippet is not None:
|
||||
location_strs.append(self.snippet)
|
||||
if self.uri is not None:
|
||||
line_strs = [self.uri]
|
||||
line_strs.append(str(self.line)) if self.line is not None else "-1"
|
||||
line_strs.append(
|
||||
str(self.start_column)
|
||||
) if self.start_column is not None else "-1"
|
||||
line_strs.append(
|
||||
str(self.end_column)
|
||||
) if self.end_column is not None else "-1"
|
||||
location_strs.append(":".join(line_strs))
|
||||
if self.message is not None:
|
||||
location_strs.append(f"({self.message})")
|
||||
print(" ".join(location_strs))
|
||||
|
||||
|
||||
@dataclasses.dataclass
|
||||
class StackFrame:
|
||||
@ -143,6 +165,10 @@ class StackFrame:
|
||||
"""Returns the SARIF representation of this stack frame."""
|
||||
return sarif.StackFrame(location=self.location.sarif())
|
||||
|
||||
def pretty_print(self):
|
||||
"""Prints the stack frame in a human-readable format."""
|
||||
self.location.pretty_print()
|
||||
|
||||
|
||||
@dataclasses.dataclass
|
||||
class Stack:
|
||||
@ -158,6 +184,12 @@ class Stack:
|
||||
else None,
|
||||
)
|
||||
|
||||
def pretty_print(self):
|
||||
"""Prints the stack in a human-readable format."""
|
||||
formatter.pretty_print_title(f"Stack: {self.message}", fill_char="-")
|
||||
for frame in self.frames:
|
||||
frame.pretty_print()
|
||||
|
||||
|
||||
# This is a workaround for mypy not supporting Self from typing_extensions.
|
||||
_Diagnostic = TypeVar("_Diagnostic", bound="Diagnostic")
|
||||
@ -182,6 +214,9 @@ class Graph:
|
||||
properties=PatchedPropertyBag(name=self.name, description=self.description),
|
||||
)
|
||||
|
||||
def pretty_print(self):
|
||||
pass
|
||||
|
||||
|
||||
@dataclasses.dataclass
|
||||
class Diagnostic:
|
||||
@ -201,7 +236,7 @@ class Diagnostic:
|
||||
message = f"{message}\n{self.additional_message}"
|
||||
sarif_result = sarif.Result(
|
||||
message=sarif.Message(text=message),
|
||||
level=self.level.value,
|
||||
level=self.level.name.lower(), # type: ignore[arg-type]
|
||||
rule_id=self.rule.id,
|
||||
)
|
||||
sarif_result.locations = [location.sarif() for location in self.locations]
|
||||
@ -235,6 +270,31 @@ class Diagnostic:
|
||||
self.additional_message = f"{self.additional_message}\n{message}"
|
||||
return self
|
||||
|
||||
def pretty_print(self, verbose: bool = False, log_level: Level = Level.ERROR):
|
||||
"""Prints the diagnostics in a human-readable format.
|
||||
|
||||
Args:
|
||||
verbose: If True, prints all information. E.g. stack frames, graphs, etc.
|
||||
Otherwise, only prints compact information. E.g., rule name and display message.
|
||||
level: The minimum level of diagnostics to print.
|
||||
"""
|
||||
if self.level.value < log_level.value:
|
||||
return
|
||||
formatter.pretty_print_item_title(f"{self.level.name}: {self.rule.name}")
|
||||
print(self.message)
|
||||
|
||||
if not verbose:
|
||||
print("<Set verbose=True to see more details>\n")
|
||||
return
|
||||
|
||||
for location in self.locations:
|
||||
location.pretty_print()
|
||||
for stack in self.stacks:
|
||||
stack.pretty_print()
|
||||
for graph in self.graphs:
|
||||
graph.pretty_print()
|
||||
print()
|
||||
|
||||
|
||||
@dataclasses.dataclass
|
||||
class RuleCollection:
|
||||
@ -284,12 +344,15 @@ class DiagnosticOptions:
|
||||
Options for diagnostic context.
|
||||
"""
|
||||
|
||||
log_verbose: bool = dataclasses.field(default=False)
|
||||
log_level: Level = dataclasses.field(default=Level.ERROR)
|
||||
|
||||
|
||||
@dataclasses.dataclass
|
||||
class DiagnosticContext:
|
||||
name: str
|
||||
version: str
|
||||
options: Optional[DiagnosticOptions] = None
|
||||
options: DiagnosticOptions = dataclasses.field(default_factory=DiagnosticOptions)
|
||||
diagnostic_type: Type[Diagnostic] = dataclasses.field(default=Diagnostic)
|
||||
diagnostics: List[Diagnostic] = dataclasses.field(init=False, default_factory=list)
|
||||
_invocation: Invocation = dataclasses.field(init=False)
|
||||
@ -350,3 +413,38 @@ class DiagnosticContext:
|
||||
diagnostic = self.diagnostic_type(rule, level, message, **kwargs)
|
||||
self.add_diagnostic(diagnostic)
|
||||
return diagnostic
|
||||
|
||||
def pretty_print(
|
||||
self, verbose: bool = False, log_level: Level = Level.ERROR
|
||||
) -> None:
|
||||
"""Prints the diagnostics in a human-readable format.
|
||||
|
||||
Args:
|
||||
verbose: Whether to print the diagnostics in verbose mode. See Diagnostic.pretty_print.
|
||||
level: The minimum level of diagnostics to print.
|
||||
"""
|
||||
formatter.pretty_print_title(
|
||||
f"Diagnostic Run {self.name} version {self.version}"
|
||||
)
|
||||
print(f"verbose: {verbose}, log level: {log_level}")
|
||||
diagnostic_stats = {level: 0 for level in Level}
|
||||
for diagnostic in self.diagnostics:
|
||||
diagnostic_stats[diagnostic.level] += 1
|
||||
formatter.pretty_print_title(
|
||||
" ".join(f"{diagnostic_stats[level]} {level.name}" for level in Level)
|
||||
)
|
||||
|
||||
for diagnostic in self.diagnostics:
|
||||
diagnostic.pretty_print(verbose, log_level)
|
||||
|
||||
unprinted_diagnostic_stats = [
|
||||
(level, count)
|
||||
for level, count in diagnostic_stats.items()
|
||||
if count > 0 and level.value < log_level.value
|
||||
]
|
||||
if unprinted_diagnostic_stats:
|
||||
print(
|
||||
f"{' '.join(f'{count} {level.name}' for level, count in unprinted_diagnostic_stats)} "
|
||||
"were not printed due to the log level."
|
||||
)
|
||||
print()
|
||||
|
||||
@ -85,8 +85,23 @@ class DiagnosticEngine:
|
||||
Returns:
|
||||
A new diagnostic context.
|
||||
"""
|
||||
if options is None:
|
||||
options = infra.DiagnosticOptions()
|
||||
context = infra.DiagnosticContext(
|
||||
name, version, options, diagnostic_type=diagnostic_type
|
||||
)
|
||||
self.contexts.append(context)
|
||||
return context
|
||||
|
||||
def pretty_print(
|
||||
self, verbose: bool = False, level: infra.Level = infra.Level.ERROR
|
||||
) -> None:
|
||||
"""Pretty prints all diagnostics in the diagnostic contexts.
|
||||
|
||||
Args:
|
||||
verbose: Whether to print the diagnostics in verbose mode. See Diagnostic.pretty_print.
|
||||
level: The minimum level of diagnostics to print.
|
||||
"""
|
||||
formatter.pretty_print_title(f"{len(self.contexts)} Diagnostic Run")
|
||||
for context in self.contexts:
|
||||
context.pretty_print(verbose, level)
|
||||
|
||||
@ -57,3 +57,21 @@ def sarif_to_json(attr_cls_obj: _SarifClass) -> str:
|
||||
dict = dataclasses.asdict(attr_cls_obj)
|
||||
dict = _convert_key(dict, _camel_case_to_snake_case)
|
||||
return json.dumps(dict, indent=4)
|
||||
|
||||
|
||||
def pretty_print_title(title: str, width: int = 80, fill_char: str = "=") -> None:
|
||||
"""Pretty prints title in below format:
|
||||
|
||||
==================== title ====================
|
||||
"""
|
||||
print(f" {title} ".center(width, fill_char))
|
||||
|
||||
|
||||
def pretty_print_item_title(title: str, fill_char: str = "=") -> None:
|
||||
"""Pretty prints title in below format:
|
||||
|
||||
title
|
||||
=====
|
||||
"""
|
||||
print(title)
|
||||
print(fill_char * len(title))
|
||||
|
||||
@ -6,7 +6,7 @@ from torch.onnx._internal.diagnostics.infra import _infra
|
||||
def python_frame(frame: inspect.FrameInfo) -> _infra.StackFrame:
|
||||
"""Returns a StackFrame for the given inspect.FrameInfo."""
|
||||
snippet = (
|
||||
frame.code_context[frame.index]
|
||||
frame.code_context[frame.index].strip()
|
||||
if frame.code_context is not None and frame.index is not None
|
||||
else None
|
||||
)
|
||||
|
||||
Reference in New Issue
Block a user