{
  "GB0000": [
    {
      "Gb_type": "All __torch_function__ overrides returned NotImplemented due to TypeError from user code",
      "Context": "fn={fn}, args={args}, kwargs={kwargs}",
      "Explanation": "All __torch_function__ overrides for function {fn} returned NotImplemented",
      "Hints": [
        "Dynamo has detected that tracing the code will result in an error when running in eager. Please double check that your code doesn't contain a similar error when actually running eager/uncompiled."
      ]
    }
  ],
  "GB0001": [
    {
      "Gb_type": "Argument of `as_subclass` must be a non-dispatcher-style tensor subclass",
      "Context": "{self}.as_subclass({cls})",
      "Explanation": "Currently not supported",
      "Hints": [
        "Avoid this call or move it outside the `torch.compile` region",
        "It may be possible to write Dynamo tracing rules for this code. Please report an issue to PyTorch if you encounter this graph break often and it is causing performance issues."
      ]
    }
  ],
"GB0002": [
|
|
{
|
|
"Gb_type": "Assertion failed on symbolic shapes",
|
|
"Context": "str(sym_expr)",
|
|
"Explanation": "",
|
|
"Hints": [
|
|
"Dynamo has detected that tracing the code will result in an error when running in eager. Please double check that your code doesn't contain a similar error when actually running eager/uncompiled."
|
|
]
|
|
}
|
|
],
|
|
"GB0003": [
|
|
{
|
|
"Gb_type": "Attempt to trace generator",
|
|
"Context": "",
|
|
"Explanation": "Generators cannot be compiled directly with `torch.compile`.",
|
|
"Hints": [
|
|
"Call a generator from inside of a non-generator Python function and ",
|
|
"compile that function instead.",
|
|
"This graph break is fundamental - it is unlikely that Dynamo will ever be able to trace through your code. Consider finding a workaround."
|
|
]
|
|
}
|
|
],
|
|
"GB0004": [
|
|
{
|
|
"Gb_type": "Attempted super().__delattr__() on an object without mutation tracking",
|
|
"Context": "call_method {self} {name}",
|
|
"Explanation": "Dynamo needs to track mutations on an object before `super().__delattr__` can be used on it. But the object ({self.objvar}) doesn't have attribute mutation tracking enabled.",
|
|
"Hints": [
|
|
"Ensure the object is tracked by Dynamo's side effect system.",
|
|
"This is likely to be a Dynamo bug. Please report an issue to PyTorch."
|
|
]
|
|
}
|
|
],
|
|
"GB0005": [
|
|
{
|
|
"Gb_type": "Attempted to a str() method implemented in C/C++",
|
|
"Context": "",
|
|
"Explanation": "{type(arg.value)} has a C/C++ based str method. This is not supported.",
|
|
"Hints": [
|
|
"Write the str method in Python"
|
|
]
|
|
}
|
|
],
|
|
"GB0006": [
|
|
{
|
|
"Gb_type": "Attempted to call a super() attribute that is not a function or method",
|
|
"Context": "call_method {self} {name}",
|
|
"Explanation": "Dynamo does not know how to trace the call `super().{name}()` because `super().{name}` is not a function or method attribute.",
|
|
"Hints": [
|
|
"Ensure the attribute accessed via `super()` is a standard method or function."
|
|
]
|
|
}
|
|
],
|
|
"GB0007": [
|
|
{
|
|
"Gb_type": "Attempted to call function marked as skipped",
|
|
"Context": "module: {module_name}, qualname: {qualname}, skip reason: {reason}",
|
|
"Explanation": "explanation",
|
|
"Hints": []
|
|
}
|
|
],
|
|
"GB0008": [
|
|
{
|
|
"Gb_type": "Attempted to inline function marked as skipped",
|
|
"Context": "qualname: {fn_qualname}, name: {func.get_name()}, filename: `{func.get_filename()}`, skip reason: {result.reason}",
|
|
"Explanation": "Dynamo developers have intentionally marked that the function `{fn_qualname}` should not be traced.",
|
|
"Hints": []
|
|
}
|
|
],
|
|
"GB0009": [
|
|
{
|
|
"Gb_type": "Attempted to inline function marked as skipped (SkipFunctionVariable)",
|
|
"Context": "Attempted to inline a SkipFunctionVariable {func}",
|
|
"Explanation": "Attempted to inline a function that was previously determined to be marked as intentionally skipped.",
|
|
"Hints": []
|
|
}
|
|
],
|
|
"GB0010": [
|
|
{
|
|
"Gb_type": "Attempted to read a deleted variable",
|
|
"Context": "item: {item}, name: {name}",
|
|
"Explanation": "",
|
|
"Hints": [
|
|
"Dynamo has detected that tracing the code will result in an error when running in eager. Please double check that your code doesn't contain a similar error when actually running eager/uncompiled."
|
|
]
|
|
}
|
|
],
|
|
"GB0011": [
|
|
{
|
|
"Gb_type": "Attempted to read undefined local variable",
|
|
"Context": "LOAD_FAST {name}",
|
|
"Explanation": "Could not find a local variable with name `{name}`",
|
|
"Hints": [
|
|
"Dynamo has detected that tracing the code will result in an error when running in eager. Please double check that your code doesn't contain a similar error when actually running eager/uncompiled."
|
|
]
|
|
}
|
|
],
|
|
"GB0012": [
|
|
{
|
|
"Gb_type": "Attempted to read undefined local variable (implicit)",
|
|
"Context": "LOAD_FAST {name}",
|
|
"Explanation": "Could not find an implicit local variable with name `{name}`",
|
|
"Hints": [
|
|
"This happens in dict/list comprehensions",
|
|
"Dynamo has detected that tracing the code will result in an error when running in eager. Please double check that your code doesn't contain a similar error when actually running eager/uncompiled."
|
|
]
|
|
}
|
|
],
|
|
"GB0013": [
|
|
{
|
|
"Gb_type": "Attempted to represent unregistered RemovableHandle",
|
|
"Context": "",
|
|
"Explanation": "Dynamo attempted to build a representation of a torch.utils.hooks.RemovableHandle, which is not supported. This happens because the RemovableHandle was created in another frame.",
|
|
"Hints": []
|
|
}
|
|
],
|
|
"GB0014": [
|
|
{
|
|
"Gb_type": "Attempted to wrap RNN, GRU, or LSTM",
|
|
"Context": "str(value)",
|
|
"Explanation": "Dynamo does not support RNN, GRU, or LSTM.",
|
|
"Hints": [
|
|
"It may be possible to write Dynamo tracing rules for this code. Please report an issue to PyTorch if you encounter this graph break often and it is causing performance issues."
|
|
]
|
|
}
|
|
],
|
|
"GB0015": [
|
|
{
|
|
"Gb_type": "Attempted to wrap sparse Tensor",
|
|
"Context": "",
|
|
"Explanation": "torch.compile does not support sparse Tensors",
|
|
"Hints": [
|
|
"It may be possible to write Dynamo tracing rules for this code. Please report an issue to PyTorch if you encounter this graph break often and it is causing performance issues."
|
|
]
|
|
}
|
|
],
|
|
"GB0016": [
|
|
{
|
|
"Gb_type": "Attempted to wrap strided NestedTensor",
|
|
"Context": "",
|
|
"Explanation": "torch.compile does not support strided NestedTensor",
|
|
"Hints": []
|
|
}
|
|
],
|
|
"GB0017": [
|
|
{
|
|
"Gb_type": "Attempted to wrap torch._higher_order_ops.invoke_subgraph",
|
|
"Context": "",
|
|
"Explanation": "Directly using invoke_subgraph is not supported. Use nested_compile_region",
|
|
"Hints": []
|
|
}
|
|
],
|
|
"GB0018": [
|
|
{
|
|
"Gb_type": "Attempted to wrap unbacked SymInt",
|
|
"Context": "",
|
|
"Explanation": "Unbacked SymInt input is not supported yet.",
|
|
"Hints": [
|
|
"It may be possible to write Dynamo tracing rules for this code. Please report an issue to PyTorch if you encounter this graph break often and it is causing performance issues."
|
|
]
|
|
}
|
|
],
|
|
"GB0019": [
|
|
{
|
|
"Gb_type": "AutogradFunctionContextVariable escaped Dynamo-traced region",
|
|
"Context": "",
|
|
"Explanation": "We cannot reconstruct a torch.autograd.Function's context object.",
|
|
"Hints": []
|
|
}
|
|
],
|
|
"GB0020": [
|
|
{
|
|
"Gb_type": "BUILD_STRING key conflict",
|
|
"Context": "format_string_parts: {format_string_parts}, kwargs: {kwargs}, part.sym_kwargs: {part.sym_kwargs}",
|
|
"Explanation": "Failed to build format string due to key conflict",
|
|
"Hints": [
|
|
"Dynamo has detected that tracing the code will result in an error when running in eager. Please double check that your code doesn't contain a similar error when actually running eager/uncompiled."
|
|
]
|
|
}
|
|
],
|
|
"GB0021": [
|
|
{
|
|
"Gb_type": "BUILD_STRING type error",
|
|
"Context": "str(part)",
|
|
"Explanation": "Format string part type is not correct - expected constant or format string.",
|
|
"Hints": [
|
|
"Dynamo has detected that tracing the code will result in an error when running in eager. Please double check that your code doesn't contain a similar error when actually running eager/uncompiled."
|
|
]
|
|
}
|
|
],
|
|
"GB0022": [
|
|
{
|
|
"Gb_type": "Bad import result",
|
|
"Context": "typestr(value)",
|
|
"Explanation": "Import result is not a Python module.",
|
|
"Hints": []
|
|
}
|
|
],
|
|
"GB0023": [
|
|
{
|
|
"Gb_type": "Builtin `operator.*` comparison with constant `self` failed",
|
|
"Context": "call_method {self} {name} {args} {kwargs}",
|
|
"Explanation": "\"Failed to compare {self} with {other}, \" + f\"because {other} is not a Python constant or its mutation check fails.\"",
|
|
"Hints": []
|
|
}
|
|
],
|
|
"GB0024": [
|
|
{
|
|
"Gb_type": "CLEANUP_THROW with StopIteration",
|
|
"Context": "",
|
|
"Explanation": "Received StopIteration when handling generator.throw/close. This is not supported.",
|
|
"Hints": []
|
|
}
|
|
],
|
|
"GB0025": [
|
|
{
|
|
"Gb_type": "Call to `torch._dynamo.graph_break()`",
|
|
"Context": "Called `torch._dynamo.graph_break()` with args `{args}`, kwargs `{kwargs}`",
|
|
"Explanation": "User-inserted graph break. Message: {graph_break_msg}",
|
|
"Hints": [
|
|
"Remove the `torch._dynamo.graph_break()` call."
|
|
]
|
|
}
|
|
],
|
|
"GB0026": [
|
|
{
|
|
"Gb_type": "Calling subclass default constructor with more than tensor argument",
|
|
"Context": "{self.value}(args={args}, kwargs={kwargs})",
|
|
"Explanation": "Currently not supported",
|
|
"Hints": [
|
|
"Avoid this constructor call or move it outside ",
|
|
"`torch.compile` regione",
|
|
"It may be possible to write Dynamo tracing rules for this code. Please report an issue to PyTorch if you encounter this graph break often and it is causing performance issues."
|
|
]
|
|
}
|
|
],
|
|
"GB0027": [
|
|
{
|
|
"Gb_type": "Cannot check Tensor object identity without its fake value",
|
|
"Context": "str(fake_tensor)",
|
|
"Explanation": "TensorVariable is missing a fake example_value.",
|
|
"Hints": [
|
|
"This is likely to be a Dynamo bug. Please report an issue to PyTorch."
|
|
]
|
|
}
|
|
],
|
|
"GB0028": [
|
|
{
|
|
"Gb_type": "Caught non-Exception value",
|
|
"Context": "str(exc_instance)",
|
|
"Explanation": "Except expects to receive an object of Exception type but received {exc_instance}.",
|
|
"Hints": [
|
|
"Dynamo has detected that tracing the code will result in an error when running in eager. Please double check that your code doesn't contain a similar error when actually running eager/uncompiled."
|
|
]
|
|
}
|
|
],
|
|
"GB0029": [
|
|
{
|
|
"Gb_type": "Compilation of intermediate hooks requires compiled autograd",
|
|
"Context": "var_getattr {self} {name}",
|
|
"Explanation": "Dynamo must be in compiled_autograd to register hooks.",
|
|
"Hints": []
|
|
}
|
|
],
|
|
"GB0030": [
|
|
{
|
|
"Gb_type": "ComptimeContext graph break",
|
|
"Context": "msg",
|
|
"Explanation": "Manually triggered ComptimeContext graph break with message {msg}.",
|
|
"Hints": []
|
|
}
|
|
],
|
|
"GB0031": [
|
|
{
|
|
"Gb_type": "Custom __getattribute__ in nn.Module attribute access",
|
|
"Context": "var_getattr {self} {name}",
|
|
"Explanation": "Dynamo does not support checking key existence on `nn.Module` instances that have a custom `__getattribute__` method defined.",
|
|
"Hints": [
|
|
"Avoid defining `__getattribute__` in your module.",
|
|
"It may be possible to write Dynamo tracing rules for this code. Please report an issue to PyTorch if you encounter this graph break often and it is causing performance issues."
|
|
]
|
|
}
|
|
],
|
|
"GB0032": [
|
|
{
|
|
"Gb_type": "Custom __getattribute__ in nn.Module dict key check",
|
|
"Context": "has_key_in_generic_dict {self} {key}",
|
|
"Explanation": "Dynamo does not support checking key existence on `nn.Module` instances that have a custom `__getattribute__` method defined.",
|
|
"Hints": [
|
|
"Avoid defining `__getattribute__` in your module.",
|
|
"It may be possible to write Dynamo tracing rules for this code. Please report an issue to PyTorch if you encounter this graph break often and it is causing performance issues."
|
|
]
|
|
}
|
|
],
|
|
"GB0033": [
|
|
{
|
|
"Gb_type": "Data dependent operator",
|
|
"Context": "str(cause.func)",
|
|
"Explanation": "Operator `{cause.func}` has a non-Tensor output whose value is dependent on the data of Tensor inputs.",
|
|
"Hints": []
|
|
}
|
|
],
|
|
"GB0034": [
|
|
{
|
|
"Gb_type": "Data-dependent assertion failed (cannot compile partial graph)",
|
|
"Context": "value: {value}",
|
|
"Explanation": "Dynamo has determined when encountering a data-dependent assert failure that it should not compile the partial graph.",
|
|
"Hints": [
|
|
"Use `torch._assert()` to raise a hard AssertionError when the check fails. ",
|
|
"This error will propagate back the user code ",
|
|
"that called the compiled function (i.e. Dynamo will not trace any exception handling).",
|
|
"Remove the assert statement.",
|
|
"Move the assert statement outside of any context managers in order to graph break with ",
|
|
"partial graph compilation (if fullgraph=False).",
|
|
"This graph break is fundamental - it is unlikely that Dynamo will ever be able to trace through your code. Consider finding a workaround."
|
|
]
|
|
}
|
|
],
|
|
"GB0035": [
|
|
{
|
|
"Gb_type": "Data-dependent branching with non-constant __bool__",
|
|
"Context": "method: {x}, result: {result}",
|
|
"Explanation": "Attempted to perform data-dependent branching on a user-defined object with a __bool__ method that did not return a constant.",
|
|
"Hints": []
|
|
}
|
|
],
|
|
"GB0036": [
|
|
{
|
|
"Gb_type": "Dynamic shape operator",
|
|
"Context": "str(cause.func)",
|
|
"Explanation": "Operator `{cause.func}`'s output shape depends on input Tensor data.",
|
|
"Hints": [
|
|
"Enable tracing of dynamic shape operators with ",
|
|
"`torch._dynamo.config.capture_dynamic_output_shape_ops = True`"
|
|
]
|
|
}
|
|
],
|
|
"GB0037": [
|
|
{
|
|
"Gb_type": "Dynamic shape operator (no meta kernel)",
|
|
"Context": "str(cause.func)",
|
|
"Explanation": "Operator `{cause.func}` does not have a meta kernel that supports dynamic output shapes",
|
|
"Hints": [
|
|
"Please report an issue to PyTorch"
|
|
]
|
|
}
|
|
],
|
|
"GB0038": [
|
|
{
|
|
"Gb_type": "Dynamic slicing with Tensor arguments",
|
|
"Context": "SliceVariable start: {start}, stop: {stop}, step: {step}",
|
|
"Explanation": "Creating slices with Tensor arguments is not supported. e.g. `l[:x]`, where `x` is a 1-element tensor.",
|
|
"Hints": [
|
|
"It may be possible to write Dynamo tracing rules for this code. Please report an issue to PyTorch if you encounter this graph break often and it is causing performance issues."
|
|
]
|
|
}
|
|
],
|
|
"GB0039": [
|
|
{
|
|
"Gb_type": "Dynamo cache limit exceeded",
|
|
"Context": "Limit type: {limit_type}",
|
|
"Explanation": "Dynamo attempted to recompile the code object too many times, exceeding the {limit_type} cache size limit.Giving up on compiling as the compile time tradeoff is likely not worth the performance gain.",
|
|
"Hints": []
|
|
}
|
|
],
|
|
"GB0040": [
|
|
{
|
|
"Gb_type": "Encountered aliasing during higher order op tracing",
|
|
"Context": "context",
|
|
"Explanation": "Higher order ops do not support aliasing. Found in {source_target.name()}",
|
|
"Hints": [
|
|
"Replace `return input` with `return input.clone()` to avoid aliasing.",
|
|
"Consider using the debug context to change user code to avoid aliasing.",
|
|
"Please open an issue."
|
|
]
|
|
}
|
|
],
|
|
"GB0041": [
|
|
{
|
|
"Gb_type": "Encountered input mutation during higher order op tracing",
|
|
"Context": "context",
|
|
"Explanation": "Higher order ops do not support input mutation. Found in {source_target.name()}",
|
|
"Hints": [
|
|
"Consider using the debug context to change user code to avoid mutation.",
|
|
"Please open an issue."
|
|
]
|
|
}
|
|
],
|
|
"GB0042": [
|
|
{
|
|
"Gb_type": "Encountered non user function variable during invoke_subgraph HOP tracing",
|
|
"Context": "str(fn_vt)",
|
|
"Explanation": "invoke_subgraph does not support non user function variable",
|
|
"Hints": [
|
|
"It may be possible to write Dynamo tracing rules for this code. Please report an issue to PyTorch if you encounter this graph break often and it is causing performance issues."
|
|
]
|
|
}
|
|
],
|
|
"GB0043": [
|
|
{
|
|
"Gb_type": "Encountered non-PT2-compliant op",
|
|
"Context": "",
|
|
"Explanation": "msg + + err_epilogue",
|
|
"Hints": []
|
|
}
|
|
],
|
|
"GB0044": [
|
|
{
|
|
"Gb_type": "Encountered strided NestedTensor in automatic dynamic dim determination",
|
|
"Context": "",
|
|
"Explanation": "torch.compile does not support strided NestedTensor",
|
|
"Hints": []
|
|
}
|
|
],
|
|
"GB0045": [
|
|
{
|
|
"Gb_type": "Encountered tensor.is_inference() during tracing",
|
|
"Context": "",
|
|
"Explanation": "tensor.is_inference() is not supported",
|
|
"Hints": [
|
|
"This graph break is fundamental - it is unlikely that Dynamo will ever be able to trace through your code. Consider finding a workaround."
|
|
]
|
|
}
|
|
],
|
|
"GB0046": [
|
|
{
|
|
"Gb_type": "Encountered torch.is_inference_mode_enabled during tracing",
|
|
"Context": "",
|
|
"Explanation": "torch.is_inference_mode_enabled() is not supported",
|
|
"Hints": [
|
|
"This graph break is fundamental - it is unlikely that Dynamo will ever be able to trace through your code. Consider finding a workaround."
|
|
]
|
|
}
|
|
],
|
|
"GB0047": [
|
|
{
|
|
"Gb_type": "Encountered unconverted argument when attempting to inline",
|
|
"Context": "func: {func}, arg: {v}",
|
|
"Explanation": "An argument to an inlined function was not successfully converted to a VariableTracker.",
|
|
"Hints": [
|
|
"This is likely to be a Dynamo bug. Please report an issue to PyTorch."
|
|
]
|
|
}
|
|
],
|
|
"GB0048": [
|
|
{
|
|
"Gb_type": "Error getting associated real value",
|
|
"Context": "call_id {self}",
|
|
"Explanation": "Dynamo encountered an error while trying to get the associated real value.",
|
|
"Hints": []
|
|
}
|
|
],
|
|
"GB0049": [
|
|
{
|
|
"Gb_type": "Error when attempting to resolve op packet",
|
|
"Context": "",
|
|
"Explanation": "str(e)",
|
|
"Hints": []
|
|
}
|
|
],
|
|
"GB0050": [
|
|
{
|
|
"Gb_type": "Exception with bad expected type",
|
|
"Context": "str(expected_exc_types)",
|
|
"Explanation": "`except ...` has unsupported type {expected_exc_types}.",
|
|
"Hints": [
|
|
"Dynamo has detected that tracing the code will result in an error when running in eager. Please double check that your code doesn't contain a similar error when actually running eager/uncompiled."
|
|
]
|
|
}
|
|
],
|
|
"GB0051": [
|
|
{
|
|
"Gb_type": "Exception with non-type expectation",
|
|
"Context": "str(expected_type)",
|
|
"Explanation": "`except ...` expects a non-type: {expected_type}.",
|
|
"Hints": [
|
|
"Dynamo has detected that tracing the code will result in an error when running in eager. Please double check that your code doesn't contain a similar error when actually running eager/uncompiled."
|
|
]
|
|
}
|
|
],
|
|
"GB0052": [
|
|
{
|
|
"Gb_type": "Excessive RestartAnalysis() calls",
|
|
"Context": "",
|
|
"Explanation": "Dynamo attempted to trace the same frame 100+ times. Giving up on compiling as the compile time tradeoff is likely not worth the performance gain.",
|
|
"Hints": []
|
|
}
|
|
],
|
|
"GB0053": [
|
|
{
|
|
"Gb_type": "FSDP with use_orig_params=False",
|
|
"Context": "",
|
|
"Explanation": "Dynamo only supports FSDP with use_orig_params=True",
|
|
"Hints": []
|
|
}
|
|
],
|
|
"GB0054": [
|
|
{
|
|
"Gb_type": "Failed to construct Enum variable",
|
|
"Context": "value: {value_vt}, allowed enum values: {list(cls_type)}",
|
|
"Explanation": "Attempted to construct an Enum value that is non-constant (e.g. int, string) or is not an acceptable value for the Enum. Acceptable values for Enum `{cls_type}`: {list(cls_type)}.",
|
|
"Hints": [
|
|
"Dynamo has detected that tracing the code will result in an error when running in eager. Please double check that your code doesn't contain a similar error when actually running eager/uncompiled."
|
|
]
|
|
}
|
|
],
|
|
"GB0055": [
|
|
{
|
|
"Gb_type": "Failed to convert args/kwargs to proxy",
|
|
"Context": "call_function args: {typestr(*args)} {typestr(*list(kwargs.values()))}",
|
|
"Explanation": "Missing `as_proxy()` implementation for some arg/kwarg.",
|
|
"Hints": []
|
|
}
|
|
],
|
|
"GB0056": [
|
|
{
|
|
"Gb_type": "Failed to mutate tensor data attribute",
|
|
"Context": "setattr({obj}, {name}, {val})",
|
|
"Explanation": "Dyanmo only supports mutating `.data` of tensor created outside `torch.compile` region",
|
|
"Hints": [
|
|
"Don't mutate `.data` on this tensor, or move ",
|
|
"the mutation out of `torch.compile` region"
|
|
]
|
|
}
|
|
],
|
|
"GB0057": [
|
|
{
|
|
"Gb_type": "Failed to raise exception",
|
|
"Context": "str(exc)",
|
|
"Explanation": "Attempted to raise a non-Exception type/value.",
|
|
"Hints": [
|
|
"Dynamo has detected that tracing the code will result in an error when running in eager. Please double check that your code doesn't contain a similar error when actually running eager/uncompiled."
|
|
]
|
|
}
|
|
],
|
|
"GB0058": [
|
|
{
|
|
"Gb_type": "Failed to set tensor attribute",
|
|
"Context": "setattr({obj}, {name}, {val})",
|
|
"Explanation": "Dyanmo doesn't support setting these tensor attributes",
|
|
"Hints": [
|
|
"Don't mutate attribute '{name}' on tensors, or ",
|
|
"move the mutation out of `torch.compile` region"
|
|
]
|
|
}
|
|
],
|
|
"GB0059": [
|
|
{
|
|
"Gb_type": "Failed to trace builtin operator",
|
|
"Context": "builtin {fn.__name__} {arg_types} {has_kwargs}",
|
|
"Explanation": "Dynamo does not know how to trace builtin operator `{fn.__name__}` with argument types {real_arg_types} (has_kwargs {has_kwargs})",
|
|
"Hints": [
|
|
"Avoid calling builtin `{fn.__name__}` with argument types {real_arg_types}. ",
|
|
"Consider using an equivalent alternative function/method to `{fn.__name__}`.",
|
|
"If you are attempting to call a logging function (e.g. `print`), ",
|
|
"you can try adding it to `torch._dynamo.config.reorderable_logging_functions`.",
|
|
"Please report an issue to PyTorch."
|
|
]
|
|
}
|
|
],
|
|
"GB0060": [
|
|
{
|
|
"Gb_type": "Failed to trace unittest method",
|
|
"Context": "function: unittest.TestCase.{name}",
|
|
"Explanation": "Dynamo does not know how to trace unittest method `{name}` ",
|
|
"Hints": [
|
|
"Avoid calling `TestCase.{name}`. ",
|
|
"Please report an issue to PyTorch."
|
|
]
|
|
}
|
|
],
|
|
"GB0061": [
|
|
{
|
|
"Gb_type": "Failed to unpack object for BUILD_LIST_UNPACK",
|
|
"Context": "str(seq)",
|
|
"Explanation": "{seq} cannot be unpacked into a list for the BUILD_LIST_UNPACK bytecode (`[*x, *y, ...]`).",
|
|
"Hints": [
|
|
"Dynamo has detected that tracing the code will result in an error when running in eager. Please double check that your code doesn't contain a similar error when actually running eager/uncompiled."
|
|
]
|
|
}
|
|
],
|
|
"GB0062": [
|
|
{
|
|
"Gb_type": "Failed to unpack object for UNPACK_EX",
|
|
"Context": "str(seq)",
|
|
"Explanation": "{seq} cannot be unpacked into a list for the UNPACK_EX bytecode.",
|
|
"Hints": [
|
|
"Dynamo has detected that tracing the code will result in an error when running in eager. Please double check that your code doesn't contain a similar error when actually running eager/uncompiled."
|
|
]
|
|
}
|
|
],
|
|
"GB0063": [
|
|
{
|
|
"Gb_type": "Failed to unpack object for UNPACK_SEQUENCE",
|
|
"Context": "str(seq)",
|
|
"Explanation": "{seq} cannot be unpacked into a list for the UNPACK_SEQUENCE bytecode (i.e. `a, b, c = d`).",
|
|
"Hints": [
|
|
"Dynamo has detected that tracing the code will result in an error when running in eager. Please double check that your code doesn't contain a similar error when actually running eager/uncompiled."
|
|
]
|
|
}
|
|
],
|
|
"GB0064": [
|
|
{
|
|
"Gb_type": "Fake tensor propagation exception",
|
|
"Context": "str(e.reason)",
|
|
"Explanation": "msg",
|
|
"Hints": []
|
|
}
|
|
],
|
|
"GB0065": [
|
|
{
|
|
"Gb_type": "Graph break in inlined function",
|
|
"Context": "",
|
|
"Explanation": "Graph breaks in an inlined call are not supported.",
|
|
"Hints": []
|
|
}
|
|
],
|
|
"GB0066": [
|
|
{
|
|
"Gb_type": "Graph break under GenericContextWrappingVariable",
|
|
"Context": "Active generic context managers: {self.active_generic_context_managers}",
|
|
"Explanation": "Attempted to graph break in an active context manager(s) that doesn't support graph breaking.",
|
|
"Hints": [
|
|
"Move the offending context manager(s) to outside the compiled region.",
|
|
"This graph break may have been caused by an earlier graph break. Resolving the earlier graph break may resolve this one."
|
|
]
|
|
}
|
|
],
|
|
"GB0067": [
|
|
{
|
|
"Gb_type": "HigherOrderOperator: Mutating a variable not in the current scope (SideEffects)",
|
|
"Context": "",
|
|
"Explanation": "This is not supported.",
|
|
"Hints": []
|
|
}
|
|
],
|
|
"GB0068": [
|
|
{
|
|
"Gb_type": "Illegal method invocation in strict mode",
|
|
"Context": "call_method {self} {name} {args} {kwargs}",
|
|
"Explanation": "Dynamo currently does not support this method ({name}) invocation in strict mode.",
|
|
"Hints": []
|
|
}
|
|
],
|
|
"GB0069": [
|
|
{
|
|
"Gb_type": "Import failure",
|
|
"Context": "module_name: {module_name}, fromlist: {fromlist}, level={level}",
|
|
"Explanation": "Failure when attempting to import.",
|
|
"Hints": [
|
|
"Dynamo has detected that tracing the code will result in an error when running in eager. Please double check that your code doesn't contain a similar error when actually running eager/uncompiled."
|
|
]
|
|
}
|
|
],
|
|
"GB0070": [
|
|
{
|
|
"Gb_type": "Indexing list with non-scalar tensor",
|
|
"Context": "call_method {self} {name} {args} {kwargs}",
|
|
"Explanation": "Attempted to index list-like object with tensor with > 1 element.",
|
|
"Hints": [
|
|
"Dynamo has detected that tracing the code will result in an error when running in eager. Please double check that your code doesn't contain a similar error when actually running eager/uncompiled."
|
|
]
|
|
}
|
|
],
|
|
"GB0071": [
|
|
{
|
|
"Gb_type": "Inline attempt with __self__",
|
|
"Context": "str(func)",
|
|
"Explanation": "Attempted to inline a function with the `__self__` attribute. Dynamo is expected to decompose method calls into function calls with a `self` argument.",
|
|
"Hints": []
|
|
}
|
|
],
|
|
"GB0072": [
|
|
{
|
|
"Gb_type": "Inplace op on input tensor",
|
|
"Context": "",
|
|
"Explanation": "Attempted to trace an inplace view op on input tensor {typestr(self.value)}.",
|
|
"Hints": [
|
|
"Ensure you do not modify input tensor in place.",
|
|
"It may be possible to write Dynamo tracing rules for this code. Please report an issue to PyTorch if you encounter this graph break often and it is causing performance issues."
|
|
]
|
|
}
|
|
],
|
|
"GB0073": [
|
|
{
|
|
"Gb_type": "Invoking an nn.Module inside a HigherOrderOperator",
|
|
"Context": "",
|
|
"Explanation": "This is not supported.",
|
|
"Hints": []
|
|
}
|
|
],
|
|
"GB0074": [
|
|
{
|
|
"Gb_type": "Invoking an nn.Module inside a higher order operator",
|
|
"Context": "Higher order op name: {self.source_target}",
|
|
"Explanation": "This is not supported.",
|
|
"Hints": []
|
|
}
|
|
],
|
|
"GB0075": [
|
|
{
|
|
"Gb_type": "LOAD_BUILD_CLASS bytecode not supported",
|
|
"Context": "",
|
|
"Explanation": "Dynamo does not support tracing classes that are defined in the compiled region.",
|
|
"Hints": [
|
|
"Move the class definition out of the compiled region.",
|
|
"It may be possible to write Dynamo tracing rules for this code. Please report an issue to PyTorch if you encounter this graph break often and it is causing performance issues."
|
|
]
|
|
}
|
|
],
|
|
"GB0076": [
|
|
{
|
|
"Gb_type": "LOAD_FAST_CHECK on uninitialized variable",
|
|
"Context": "inst.argval",
|
|
"Explanation": "Attempted to load uninitialized local variable {inst.argval}",
|
|
"Hints": [
|
|
"Dynamo has detected that tracing the code will result in an error when running in eager. Please double check that your code doesn't contain a similar error when actually running eager/uncompiled."
|
|
]
|
|
}
|
|
],
|
|
"GB0077": [
|
|
{
|
|
"Gb_type": "Length mismatch when unpacking object for UNPACK_SEQUENCE",
|
|
"Context": "expected length: {inst.argval}, actual: {len(val)}",
|
|
"Explanation": "{seq} unpacked to a list for the UNPACK_SEQUENCE bytecode (i.e. `a, b, c = d`) with unexpected length.",
|
|
"Hints": [
|
|
"This is likely to be a Dynamo bug. Please report an issue to PyTorch."
|
|
]
|
|
}
|
|
],
|
|
"GB0078": [
|
|
{
|
|
"Gb_type": "Limitation of `nonstrict_trace",
|
|
"Context": "{self}",
|
|
"Explanation": "msg",
|
|
"Hints": [
|
|
"make sure definition of {fn_name} is outside ",
|
|
"`torch.compile` region"
|
|
]
|
|
}
|
|
],
|
|
"GB0079": [
|
|
{
|
|
"Gb_type": "Missing CALL_INTRINSIC_1 handler",
|
|
"Context": "CALL_INTRINSIC_1 operand: {inst.argval}",
|
|
"Explanation": "No handler implemented for CALL_INTRINSIC_1 {inst.argval} instruction.",
|
|
"Hints": [
|
|
"It may be possible to write Dynamo tracing rules for this code. Please report an issue to PyTorch if you encounter this graph break often and it is causing performance issues."
|
|
]
|
|
}
|
|
],
|
|
"GB0080": [
|
|
{
|
|
"Gb_type": "Missing FakeTensor example value",
|
|
"Context": "str(node)",
|
|
"Explanation": "`FakeTensor` example value was required for {node} but not available.",
|
|
"Hints": [
|
|
"This is likely to be a Dynamo bug. Please report an issue to PyTorch."
|
|
]
|
|
}
|
|
],
|
|
"GB0081": [
|
|
{
|
|
"Gb_type": "Missing attribute when running call_method node",
|
|
"Context": "",
|
|
"Explanation": "make_error_message(\"attribute not defined\")",
|
|
"Hints": []
|
|
}
|
|
],
|
|
"GB0082": [
|
|
{
|
|
"Gb_type": "Missing bytecode handler",
|
|
"Context": "{opname} with args {args}",
|
|
"Explanation": "Dynamo does not know how to handle the bytecode instruction `{opname}`.",
|
|
"Hints": [
|
|
"Do not trace code that produces the `{opname}` bytecode instruction ",
|
|
"(see https://docs.python.org/3/library/dis.html for bytecode semantics).",
|
|
"It may be possible to write Dynamo tracing rules for this code. Please report an issue to PyTorch if you encounter this graph break often and it is causing performance issues."
|
|
]
|
|
}
|
|
],
|
|
"GB0083": [
|
|
{
|
|
"Gb_type": "Module-level backwards hooks require compiled autograd.",
|
|
"Context": "",
|
|
"Explanation": "",
|
|
"Hints": [
|
|
"Enable compiled autograd by setting torch._dynamo.config.compiled_autograd = True."
|
|
]
|
|
}
|
|
],
|
|
"GB0084": [
|
|
{
|
|
"Gb_type": "Non-constant attribute given to `super().__delattr__()`",
|
|
"Context": "call_method {self} {name}",
|
|
"Explanation": "Dynamo requires the attribute name passed to `super().__delattr__(...)` to be a constant (string).",
|
|
"Hints": [
|
|
"Ensure the attribute name is a string literal or a constant variable."
|
|
]
|
|
}
|
|
],
|
|
"GB0085": [
|
|
{
|
|
"Gb_type": "Non-function or method in subclass of torch.autograd.Function",
|
|
"Context": "call_apply {self} {args} {kwargs}",
|
|
"Explanation": "Dynamo requires the `forward` attribute of a `torch.autograd.Function` subclass to be a standard Python function or method. Found type `{type(fn).__name__}` instead.",
|
|
"Hints": [
|
|
"Ensure the `forward` method is defined as a regular ",
|
|
"function or instance method."
|
|
]
|
|
}
|
|
],
|
|
"GB0086": [
|
|
{
|
|
"Gb_type": "Not a Python constant",
|
|
"Context": "guard_as_python_constant {self}",
|
|
"Explanation": "Failed to convert {self} into a Python constant.",
|
|
"Hints": []
|
|
}
|
|
],
|
|
"GB0087": [
|
|
{
|
|
"Gb_type": "NotImplementedError/UnsupportedFakeTensorException when running FX node",
|
|
"Context": "",
|
|
"Explanation": "make_error_message(e)",
|
|
"Hints": []
|
|
}
|
|
],
|
|
"GB0088": [
|
|
{
|
|
"Gb_type": "Observed exception",
|
|
"Context": "raised exception {curr_exc.python_type_name()}({curr_exc.args})",
|
|
"Explanation": "observed_exn_gb_explanation",
|
|
"Hints": [
|
|
"Dynamo has detected that tracing the code will result in an error when running in eager. Please double check that your code doesn't contain a similar error when actually running eager/uncompiled."
|
|
]
|
|
}
|
|
],
|
|
"GB0089": [
|
|
{
|
|
"Gb_type": "Observed exception (EXCEPT_HANDLER)",
|
|
"Context": "str(raised_exception)",
|
|
"Explanation": "observed_exn_gb_explanation + \" This graph break is unexpected.\"",
|
|
"Hints": [
|
|
"This is likely to be a Dynamo bug. Please report an issue to PyTorch."
|
|
]
|
|
}
|
|
],
|
|
"GB0090": [
|
|
{
|
|
"Gb_type": "Operator does not support running with fake tensors",
|
|
"Context": "unsupported operator: {cause.func}",
|
|
"Explanation": "",
|
|
"Hints": [
|
|
"{import_suggestion}see ",
|
|
"https://docs.google.com/document/d/1GgvOe7C8_NVOMLOCwDaYV1mXXyHMXY7ExoewHqooxrs/edit#heading=h.64r4npvq0w0",
|
|
" for how to fix"
|
|
]
|
|
}
|
|
],
|
|
"GB0091": [
|
|
{
|
|
"Gb_type": "Read uninitialized cell",
|
|
"Context": "str(cellvar)",
|
|
"Explanation": "Attempted to read a cell variable that has not been populated yet.",
|
|
"Hints": [
|
|
"Dynamo has detected that tracing the code will result in an error when running in eager. Please double check that your code doesn't contain a similar error when actually running eager/uncompiled."
|
|
]
|
|
}
|
|
],
|
|
"GB0092": [
|
|
{
|
|
"Gb_type": "Reconstruction failure",
|
|
"Context": "str(value)",
|
|
"Explanation": "Dynamo has no bytecode reconstruction implemented for sourceless variable {value}.",
|
|
"Hints": [
|
|
"If Dynamo is attempting to trace a return statement and your code is attempting to return a variable ",
|
|
"that Dynamo cannot reconstruct, then remove it from the return statement.",
|
|
"Report an issue to PyTorch if you need reconstrtuction support. Note that objects that don't have ",
|
|
"reconstruction rules may be fundamentally unreconstructable.",
|
|
"This graph break may have been caused by an earlier graph break. Resolving the earlier graph break may resolve this one."
|
|
]
|
|
}
|
|
],
|
|
"GB0093": [
|
|
{
|
|
"Gb_type": "Reconstruction failure: source.reconstruct not implemented",
|
|
"Context": "str(source)",
|
|
"Explanation": "Dynamo has no bytecode reconstruction implemented for {type(source)} variable {source}.",
|
|
"Hints": [
|
|
"This is likely to be a Dynamo bug. Please report an issue to PyTorch."
|
|
]
|
|
}
|
|
],
|
|
"GB0094": [
|
|
{
|
|
"Gb_type": "SEND with bad type",
|
|
"Context": "TOS type: {typestr(tos)}",
|
|
"Explanation": "Attempted to SEND with unsupported type {typestr(tos)}.",
|
|
"Hints": []
|
|
}
|
|
],
|
|
"GB0095": [
|
|
{
|
|
"Gb_type": "Set Exception object `__traceback__` attribute to not-`None`",
|
|
"Context": "call_setattr {self} {name}",
|
|
"Explanation": "Dynamo does not support setting the attribute '__traceback__' on tracked exception objects to anything other than None.",
|
|
"Hints": [
|
|
"Avoid setting '__traceback__' on exception objects ",
|
|
"within traced code, or set it to None."
|
|
]
|
|
}
|
|
],
|
|
"GB0096": [
|
|
{
|
|
"Gb_type": "Should not compile partial graph (STORE_ATTR)",
|
|
"Context": "",
|
|
"Explanation": "Dynamo has determined when encountering an unsupported STORE_ATTR instruction (i.e. `obj.attr = val`) that it should not compile the partial graph.",
|
|
"Hints": []
|
|
}
|
|
],
|
|
"GB0097": [
|
|
{
|
|
"Gb_type": "Side effect on existing deque with limited maxlen",
|
|
"Context": "",
|
|
"Explanation": "This is not supported.",
|
|
"Hints": [
|
|
"Don't use a deque with `maxlen` specified."
|
|
]
|
|
}
|
|
],
|
|
"GB0098": [
|
|
{
|
|
"Gb_type": "Skip calling `torch.compiler.disable()`d function",
|
|
"Context": "str(self.value)",
|
|
"Explanation": "Skip calling function `{self.value}` since it was wrapped with `torch.compiler.disable` (reason: {msg})",
|
|
"Hints": [
|
|
"Remove the `torch.compiler.disable` call"
|
|
]
|
|
}
|
|
],
|
|
"GB0099": [
|
|
{
|
|
"Gb_type": "Skip inlining `torch.compiler.disable()`d function",
|
|
"Context": "str(func.get_function())",
|
|
"Explanation": "Skip inlining function {func.get_function()} since it was wrapped with `torch.compiler.disable` (reason: {msg})",
|
|
"Hints": [
|
|
"Remove the `torch.compiler.disable` call"
|
|
]
|
|
}
|
|
],
|
|
"GB0100": [
|
|
{
|
|
"Gb_type": "Storing Tensor hook handle in globals",
|
|
"Context": "name",
|
|
"Explanation": "This is not supported.",
|
|
"Hints": []
|
|
}
|
|
],
|
|
"GB0101": [
|
|
{
|
|
"Gb_type": "Storing Tensor hook handle in globals (inline call)",
|
|
"Context": "inst.argval",
|
|
"Explanation": "This is not supported.",
|
|
"Hints": []
|
|
}
|
|
],
|
|
"GB0102": [
|
|
{
|
|
"Gb_type": "Strict mode banned op",
|
|
"Context": "var_getattr {self} {name}",
|
|
"Explanation": "Getattr invocation '{name}' in strict mode is not supported.",
|
|
"Hints": [
|
|
"Remove `{name}` from the list of banned ops by ",
|
|
"setting `torch._dynamo.config._autograd_backward_strict_mode_banned_ops`."
|
|
]
|
|
}
|
|
],
|
|
"GB0103": [
|
|
{
|
|
"Gb_type": "Tensor subclass overridden method call",
|
|
"Context": "{name}",
|
|
"Explanation": "`torch.compile` currently can't trace this",
|
|
"Hints": [
|
|
"Avoid calling {name} of tensor subclass in torch.compile region",
|
|
"Renaming method `{name}` of type {self.class_type}",
|
|
"It may be possible to write Dynamo tracing rules for this code. Please report an issue to PyTorch if you encounter this graph break often and it is causing performance issues."
|
|
]
|
|
}
|
|
],
|
|
"GB0104": [
|
|
{
|
|
"Gb_type": "Tensor with grad_fn()",
|
|
"Context": "var_getattr {self} grad_fn",
|
|
"Explanation": "Dynamo does not support tracing tensors with a grad_fn directly.",
|
|
"Hints": []
|
|
}
|
|
],
|
|
"GB0105": [
|
|
{
|
|
"Gb_type": "Tensor.numpy() with trace_numpy=False",
|
|
"Context": "call_method {self} numpy",
|
|
"Explanation": "`Tensor.numpy()` was called, but the `trace_numpy` configuration was manually disabled.",
|
|
"Hints": [
|
|
"Set `torch._dynamo.config.trace_numpy = True` to allow ",
|
|
"Dynamo to trace through NumPy."
|
|
]
|
|
}
|
|
],
|
|
"GB0106": [
|
|
{
|
|
"Gb_type": "Tensor.numpy() without NumPy installed",
|
|
"Context": "call_method {self} numpy",
|
|
"Explanation": "`Tensor.numpy()` was called, but the NumPy library is not available in the current environment.",
|
|
"Hints": [
|
|
"Ensure NumPy is installed in your Python environment."
|
|
]
|
|
}
|
|
],
|
|
"GB0107": [
|
|
{
|
|
"Gb_type": "Tensor.random_ op",
|
|
"Context": "Tensor.{name}(args={args}, kwargs={kwargs})",
|
|
"Explanation": "This is currently not supported.",
|
|
"Hints": [
|
|
"Use the out-of-place version of this op",
|
|
"It may be possible to write Dynamo tracing rules for this code. Please report an issue to PyTorch if you encounter this graph break often and it is causing performance issues."
|
|
]
|
|
}
|
|
],
|
|
"GB0108": [
|
|
{
|
|
"Gb_type": "Tensor.retain_grad() with AOTDispatcher",
|
|
"Context": "var_getattr {self} retain_grad",
|
|
"Explanation": "`Tensor.retain_grad()` does not work with AOTDispatcher.",
|
|
"Hints": []
|
|
}
|
|
],
|
|
"GB0109": [
|
|
{
|
|
"Gb_type": "Tensor.tolist() with non-integer tensor",
|
|
"Context": "call_method {self} to_list",
|
|
"Explanation": "Dynamo currently does not support tracing `tolist()` on non-integer tensors.",
|
|
"Hints": [
|
|
"Ensure the input tensor to `tolist()` is an integer ",
|
|
"type (e.g., int8, int16, int32, int64)."
|
|
]
|
|
}
|
|
],
|
|
"GB0110": [
|
|
{
|
|
"Gb_type": "Tensor.uniform_ op called with `from` keyword",
|
|
"Context": "Tensor.{name}(args={args}, kwargs={kwargs})",
|
|
"Explanation": "This is currently not supported.",
|
|
"Hints": [
|
|
"Avoid using the `from` keyword.",
|
|
"It may be possible to write Dynamo tracing rules for this code. Please report an issue to PyTorch if you encounter this graph break often and it is causing performance issues."
|
|
]
|
|
}
|
|
],
|
|
"GB0111": [
|
|
{
|
|
"Gb_type": "TypeError from user code",
|
|
"Context": "call_function({self.value}, {args}, {kwargs})",
|
|
"Explanation": "msg",
|
|
"Hints": [
|
|
"Dynamo has detected that tracing the code will result in an error when running in eager. Please double check that your code doesn't contain a similar error when actually running eager/uncompiled."
|
|
]
|
|
}
|
|
],
|
|
"GB0112": [
|
|
{
|
|
"Gb_type": "TypeError when making fake tensor call",
|
|
"Context": "TypeError {node.target}: {cause}",
|
|
"Explanation": "",
|
|
"Hints": []
|
|
}
|
|
],
|
|
"GB0113": [
|
|
{
|
|
"Gb_type": "Unable to resolve super getattr",
|
|
"Context": "",
|
|
"Explanation": "Dynamo failed to trace attribute `{name}` accessed via `super()` (for type `{self.typevar}` and object `{self.objvar}`) because the resolved attribute type is not supported.",
|
|
"Hints": [
|
|
"Ensure the attribute exists in the parent class.",
|
|
"Check the arguments passed to `super()`."
|
|
]
|
|
}
|
|
],
|
|
"GB0114": [
|
|
{
|
|
"Gb_type": "Unexpected failure during itertools.accumulate() iteration",
|
|
"Context": "call_function {self} {args} {kwargs}",
|
|
"Explanation": "Unexpected failure in invoking function during accumulate. Failed running func {func}({item}{acc})",
|
|
"Hints": [
|
|
"This graph break may be difficult to debug. Please report an issue to PyTorch for assistance."
|
|
]
|
|
}
|
|
],
|
|
"GB0115": [
|
|
{
|
|
"Gb_type": "Unexpected failure during itertools.groupby() iteration",
|
|
"Context": "call_function {self} {args} {kwargs}",
|
|
"Explanation": "Unexpected failure in invoking function during groupby",
|
|
"Hints": [
|
|
"It may be possible to write Dynamo tracing rules for this code. Please report an issue to PyTorch if you encounter this graph break often and it is causing performance issues."
|
|
]
|
|
}
|
|
],
|
|
"GB0116": [
|
|
{
|
|
"Gb_type": "Unexpected type in sourceless builder",
|
|
"Context": "{value_type.__module__}.{value_type.__qualname__}",
|
|
"Explanation": "SourcelessBuilder.create does not know how to wrap {value_type}",
|
|
"Hints": [
|
|
"This is likely to be a Dynamo bug. Please report an issue to PyTorch."
|
|
]
|
|
}
|
|
],
|
|
"GB0117": [
|
|
{
|
|
"Gb_type": "Unhandled args for method",
|
|
"Context": "call_method {self} {name} {args} {kwargs}",
|
|
"Explanation": "Dynamo encountered an error while calling the method `{name}`.",
|
|
"Hints": []
|
|
}
|
|
],
|
|
"GB0118": [
|
|
{
|
|
"Gb_type": "Unimplemented next() call",
|
|
"Context": "next({self})",
|
|
"Explanation": "This abstract method must be implemented",
|
|
"Hints": [
|
|
"This is likely to be a Dynamo bug. Please report an issue to PyTorch."
|
|
]
|
|
}
|
|
],
|
|
"GB0119": [
|
|
{
|
|
"Gb_type": "Uninitialized nn.Module",
|
|
"Context": "typestr(value)",
|
|
"Explanation": "Attempted to trace an uninitialized nn.Module of type {typestr(value)}.",
|
|
"Hints": [
|
|
"Ensure your nn.Module instance has called `super().__init__()`.",
|
|
"Dynamo has detected that tracing the code will result in an error when running in eager. Please double check that your code doesn't contain a similar error when actually running eager/uncompiled."
|
|
]
|
|
}
|
|
],
|
|
"GB0120": [
|
|
{
|
|
"Gb_type": "Unreachable sub-generator code",
|
|
"Context": "",
|
|
"Explanation": "Should only be encountered while implementing generator support.",
|
|
"Hints": []
|
|
}
|
|
],
|
|
"GB0121": [
|
|
{
|
|
"Gb_type": "UnspecializedNNModuleVariable missing method",
|
|
"Context": "call_method: {self} {name} {args} {kwargs}",
|
|
"Explanation": "Dynamo does not support tracing method {name} of nn.Module {self.value}",
|
|
"Hints": [
|
|
"Dynamo does not really define unspecialized nn.Module very well.",
|
|
"This graph break may be difficult to debug. Please report an issue to PyTorch for assistance."
|
|
]
|
|
}
|
|
],
|
|
"GB0122": [
|
|
{
|
|
"Gb_type": "Unsupported SourceType",
|
|
"Context": "MutationType.__init__ {self} {typ}",
|
|
"Explanation": "Dynamo does not support the type `{typ}`",
|
|
"Hints": [
|
|
"This branch is not supposed to be reachable.",
|
|
"This is likely to be a Dynamo bug. Please report an issue to PyTorch."
|
|
]
|
|
}
|
|
],
|
|
"GB0123": [
|
|
{
|
|
"Gb_type": "Unsupported Tensor.backward() call",
|
|
"Context": "call_method {self} backward {args} {kwargs}",
|
|
"Explanation": "Dynamo currently does not support tracing `Tensor.backward()`.",
|
|
"Hints": [
|
|
"This graph break is fundamental - it is unlikely that Dynamo will ever be able to trace through your code. Consider finding a workaround."
|
|
]
|
|
}
|
|
],
|
|
"GB0124": [
|
|
{
|
|
"Gb_type": "Unsupported Tensor.item() call with capture_scalar_outputs=False",
|
|
"Context": "call_method {self} item {args} {kwargs}",
|
|
"Explanation": "Dynamo does not support tracing `Tensor.item()` with config.capture_scalar_outputs=False.",
|
|
"Hints": [
|
|
"Set `torch._dynamo.config.capture_scalar_outputs = True` ",
|
|
"or `export TORCHDYNAMO_CAPTURE_SCALAR_OUTPUTS=1` ",
|
|
"to include these operations in the captured graph."
|
|
]
|
|
}
|
|
],
|
|
"GB0125": [
|
|
{
|
|
"Gb_type": "Unsupported Tensor.requires_grad_() call",
|
|
"Context": "call_method {self} requires_grad_",
|
|
"Explanation": "Dynamo does not support changes to a Tensor's `requires_grad` through calling `requires_grad_()`.",
|
|
"Hints": []
|
|
}
|
|
],
|
|
"GB0126": [
|
|
{
|
|
"Gb_type": "Unsupported Tensor.resize_() call",
|
|
"Context": "call_method {self} resize_ {args} {kwargs}",
|
|
"Explanation": "Dynamo currently does not support tracing `Tensor.resize_()`.",
|
|
"Hints": []
|
|
}
|
|
],
|
|
"GB0127": [
|
|
{
|
|
"Gb_type": "Unsupported Tensor.resize_as_() call",
|
|
"Context": "call_method {self} resize_as_ {args} {kwargs}",
|
|
"Explanation": "Dynamo currently does not support tracing `Tensor.resize_as_()`.",
|
|
"Hints": []
|
|
}
|
|
],
|
|
"GB0128": [
|
|
{
|
|
"Gb_type": "Unsupported Tensor.set_() call",
|
|
"Context": "call_method {self} set_ {args} {kwargs}",
|
|
"Explanation": "Dynamo currently does not support tracing `Tensor.set_()` overloads that include more than one argument.",
|
|
"Hints": [
|
|
"It may be possible to write Dynamo tracing rules for this code. Please report an issue to PyTorch if you encounter this graph break often and it is causing performance issues."
|
|
]
|
|
}
|
|
],
|
|
"GB0129": [
|
|
{
|
|
"Gb_type": "Unsupported Tensor.sparse_resize_() call",
|
|
"Context": "call_method {self} sparse_resize_ {args} {kwargs}",
|
|
"Explanation": "Dynamo currently does not support tracing `Tensor.sparse_resize_()`.",
|
|
"Hints": []
|
|
}
|
|
],
|
|
"GB0130": [
|
|
{
|
|
"Gb_type": "Unsupported Tensor.sparse_resize_and_clear_() call",
|
|
"Context": "call_method {self} sparse_resize_and_clear_ {args} {kwargs}",
|
|
"Explanation": "Dynamo currently does not support tracing `Tensor.sparse_resize_and_clear_()`.",
|
|
"Hints": []
|
|
}
|
|
],
|
|
"GB0131": [
|
|
{
|
|
"Gb_type": "Unsupported __setitem__/__setattr__ inline attempt",
|
|
"Context": "code name: {code.co_name}, args: {args}",
|
|
"Explanation": "Attempted to inline {code.co_name} where first argument (self) is not a user-defined object.",
|
|
"Hints": []
|
|
}
|
|
],
|
|
"GB0132": [
|
|
{
|
|
"Gb_type": "Unsupported `func` in itertools.accumulate",
|
|
"Context": "call_function {self} {args} {kwargs}",
|
|
"Explanation": "Dynamo does not know how to get the function to use for itertools.accumulate. itertools.accumulate expects the `func` as the second argument or as a keyword argument.",
|
|
"Hints": [
|
|
"Dynamo has detected that tracing the code will result in an error when running in eager. Please double check that your code doesn't contain a similar error when actually running eager/uncompiled."
|
|
]
|
|
}
|
|
],
|
|
"GB0133": [
|
|
{
|
|
"Gb_type": "Unsupported arguments for itertools.accumulate",
|
|
"Context": "call_function {self} {args} {kwargs}",
|
|
"Explanation": "Dynamo does not know how to trace itertools.accumulate with args: {args} and kwargs: {kwargs}. itertools.accumulate expects an iterable, an optional binary function for accumulation, and an optional initial value to set the starting state.",
|
|
"Hints": [
|
|
"Make sure the arguments to itertools.accumulate are correct.",
|
|
"It may be possible to write Dynamo tracing rules for this code. Please report an issue to PyTorch if you encounter this graph break often and it is causing performance issues."
|
|
]
|
|
}
|
|
],
|
|
"GB0134": [
|
|
{
|
|
"Gb_type": "Unsupported arguments for itertools.groupby",
|
|
"Context": "call_function {self} {args} {kwargs}",
|
|
"Explanation": "Dynamo does not know how to trace itertools.groupby with args: {args} and kwargs: {kwargs}. itertools.groupby expects an iterable to group and an optional key function to determine groupings.",
|
|
"Hints": [
|
|
"Make sure the arguments to itertools.groupby are correct.",
|
|
"It may be possible to write Dynamo tracing rules for this code. Please report an issue to PyTorch if you encounter this graph break often and it is causing performance issues."
|
|
]
|
|
}
|
|
],
|
|
"GB0135": [
|
|
{
|
|
"Gb_type": "Unsupported attribute assignment on Exception object",
|
|
"Context": "call_setattr {self} {name}",
|
|
"Explanation": "Dynamo does not support setting the attribute '{name}' on tracked exception objects. Only `__context__`, `__cause__`, `__suppress_context__`, and `__traceback__` are supported.",
|
|
"Hints": [
|
|
"It may be possible to write Dynamo tracing rules for this code. Please report an issue to PyTorch if you encounter this graph break often and it is causing performance issues."
|
|
]
|
|
}
|
|
],
|
|
"GB0136": [
|
|
{
|
|
"Gb_type": "Unsupported attribute for range() object",
|
|
"Context": "var_getattr {self} {name}",
|
|
"Explanation": "Expected attribute to be one of {','.join(fields)} but got {name}",
|
|
"Hints": [
|
|
"Dynamo has detected that tracing the code will result in an error when running in eager. Please double check that your code doesn't contain a similar error when actually running eager/uncompiled."
|
|
]
|
|
}
|
|
],
|
|
"GB0137": [
|
|
{
|
|
"Gb_type": "Unsupported attribute for slice() object",
|
|
"Context": "var_getattr {self} {name}",
|
|
"Explanation": "Expected attribute to be one of {','.join(fields)} but got {name}",
|
|
"Hints": [
|
|
"Dynamo has detected that tracing the code will result in an error when running in eager. Please double check that your code doesn't contain a similar error when actually running eager/uncompiled."
|
|
]
|
|
}
|
|
],
|
|
"GB0138": [
|
|
{
|
|
"Gb_type": "Unsupported autograd.Function context `save_for_backward`",
|
|
"Context": "call_method {self} {name}",
|
|
"Explanation": "Dynamo requires the `saved_tensors` attribute to be initialized on the `autograd.Function` context object.",
|
|
"Hints": [
|
|
"Ensure that the `saved_tensors` attribute is properly ",
|
|
"initialized before calling `save_for_backward`. ",
|
|
"`save_for_backward` only supported on a newly constructed `torch.autograd.function.FunctionCtx`."
|
|
]
|
|
}
|
|
],
|
|
"GB0139": [
|
|
{
|
|
"Gb_type": "Unsupported autograd.Function context method",
|
|
"Context": "call_method {self} {name}",
|
|
"Explanation": "Dynamo does not support calling the method `{name}` on `autograd.Function` context objects. Supported methods are `__setattr__`, `save_for_backward` and `mark_non_differentiable`.",
|
|
"Hints": [
|
|
"It may be possible to write Dynamo tracing rules for this code. Please report an issue to PyTorch if you encounter this graph break often and it is causing performance issues."
|
|
]
|
|
}
|
|
],
|
|
"GB0140": [
|
|
{
|
|
"Gb_type": "Unsupported autograd.Function method",
|
|
"Context": "call_method {self} {name}",
|
|
"Explanation": "Dynamo does not support calling the method `{name}` directly on the `torch.autograd.Function` instance. Supported methods include `apply`, `backward`, static methods, and class methods.",
|
|
"Hints": [
|
|
"Ensure the method is decorated with `@staticmethod` ",
|
|
"or `@classmethod` if it's meant to be called on the class."
|
|
]
|
|
}
|
|
],
|
|
"GB0141": [
|
|
{
|
|
"Gb_type": "Unsupported call_id() without source",
|
|
"Context": "call_id {self}",
|
|
"Explanation": "call_id() not supported for sourceless TensorVariable.",
|
|
"Hints": []
|
|
}
|
|
],
|
|
"GB0142": [
|
|
{
|
|
"Gb_type": "Unsupported context manager",
|
|
"Context": "Attempted SETUP_WITH/BEFORE_WITH/LOAD_SPECIAL on {ctx}",
|
|
"Explanation": "Dynamo does not know how to enter a `{ctx.python_type_name()}` context manager.",
|
|
"Hints": [
|
|
"Avoid using the unsupported context manager.",
|
|
"If the context manager seems like it should be supported (e.g. torch.set_grad_enabled), then ",
|
|
"it may be the case that it was created outside the compiled region, which Dynamo does not support. ",
|
|
"Supported context managers can cross graph break boundaries only if they are local non-closure ",
|
|
"variables, or are intermediate values.",
|
|
"File an issue to PyTorch. Simple context managers can potentially be supported, ",
|
|
"but note that context managers can't be supported in general"
|
|
]
|
|
},
|
|
{
|
|
"Gb_type": "Unsupported context manager",
|
|
"Context": "Attempted SETUP_WITH/BEFORE_WITH on {ctx}",
|
|
"Explanation": "Dynamo does not know how to enter a `{ctx.python_type_name()}` context manager.",
|
|
"Hints": [
|
|
"Avoid using the unsupported context manager.",
|
|
"If the context manager seems like it should be supported (e.g. torch.set_grad_enabled), then ",
|
|
"it may be the case that it was created outside the compiled region, which Dynamo does not support. ",
|
|
"Supported context managers can cross graph break boundaries only if they are local non-closure ",
|
|
"variables, or are intermediate values.",
|
|
"File an issue to PyTorch. Simple context managers can potentially be supported, ",
|
|
"but note that context managers can't be supported in general"
|
|
]
|
|
}
|
|
],
|
|
"GB0143": [
|
|
{
|
|
"Gb_type": "Unsupported conversion for slice assignment",
|
|
"Context": "call_method {self} {name} {args}",
|
|
"Explanation": "Missing dynamo support for converting {value} into a list for slice assignment.",
|
|
"Hints": [
|
|
"It may be possible to write Dynamo tracing rules for this code. Please report an issue to PyTorch if you encounter this graph break often and it is causing performance issues."
|
|
]
|
|
}
|
|
],
|
|
"GB0144": [
|
|
{
|
|
"Gb_type": "Unsupported custom jvp",
|
|
"Context": "call_apply {self} {args} {kwargs}",
|
|
"Explanation": "Dynamo does not support tracing `torch.autograd.Function` subclasses that define a custom `jvp` method.",
|
|
"Hints": [
|
|
"Remove the custom `jvp` method if possible.",
|
|
"It may be possible to write Dynamo tracing rules for this code. Please report an issue to PyTorch if you encounter this graph break often and it is causing performance issues."
|
|
]
|
|
}
|
|
],
|
|
"GB0145": [
|
|
{
|
|
"Gb_type": "Unsupported custom vjp",
|
|
"Context": "call_apply {self} {args} {kwargs}",
|
|
"Explanation": "Dynamo does not support tracing `torch.autograd.Function` subclasses that define a custom `vjp` method.",
|
|
"Hints": [
|
|
"Remove the custom `vjp` method if possible.",
|
|
"Use standard `backward` instead if applicable.",
|
|
"It may be possible to write Dynamo tracing rules for this code. Please report an issue to PyTorch if you encounter this graph break often and it is causing performance issues."
|
|
]
|
|
}
|
|
],
|
|
"GB0146": [
|
|
{
|
|
"Gb_type": "Unsupported event method",
|
|
"Context": "str(name)",
|
|
"Explanation": "Dynamo doesn't support tracing the {method_name} method. We currently support wait, record, synchronize, and query.",
|
|
"Hints": [
|
|
"It may be possible to write Dynamo tracing rules for this code. Please report an issue to PyTorch if you encounter this graph break often and it is causing performance issues."
|
|
]
|
|
}
|
|
],
|
|
"GB0147": [
|
|
{
|
|
"Gb_type": "Unsupported function call",
|
|
"Context": "call_function {self} {args} {kwargs}",
|
|
"Explanation": "Dynamo does not know how to trace the function `{self.debug_repr()}`",
|
|
"Hints": [
|
|
"Avoid calling `{self.debug_repr()}` in your code.",
|
|
"Please report an issue to PyTorch."
|
|
]
|
|
}
|
|
],
|
|
"GB0148": [
|
|
{
|
|
"Gb_type": "Unsupported function call (delayed)",
|
|
"Context": "source: {self.source}",
|
|
"Explanation": "Dynamo determined that a graph break should occur when calling `{self.source.name()}`. Reason: {self.msg}",
|
|
"Hints": []
|
|
}
|
|
],
|
|
"GB0149": [
|
|
{
|
|
"Gb_type": "Unsupported functorch tracing attempt",
|
|
"Context": "",
|
|
"Explanation": "msg",
|
|
"Hints": []
|
|
}
|
|
],
|
|
"GB0150": [
|
|
{
|
|
"Gb_type": "Unsupported hasattr call",
|
|
"Context": "call_obj_hasattr {self} {name}",
|
|
"Explanation": "Dynamo does not know how to trace the function `{self.debug_repr()}`",
|
|
"Hints": [
|
|
"Avoid calling `hasattr({self.__class__.__name__}, {name})` in your code.",
|
|
"It may be possible to write Dynamo tracing rules for this code. Please report an issue to PyTorch if you encounter this graph break often and it is causing performance issues."
|
|
]
|
|
}
|
|
],
|
|
"GB0151": [
|
|
{
|
|
"Gb_type": "Unsupported inspect call",
|
|
"Context": "inspect_parameter_names {self}",
|
|
"Explanation": "Dynamo does not know how to trace the function `{self.debug_repr()}`",
|
|
"Hints": []
|
|
}
|
|
],
|
|
"GB0152": [
|
|
{
|
|
"Gb_type": "Unsupported key type for itertools.groupby",
|
|
"Context": "call_function {self} {args} {kwargs}",
|
|
"Explanation": "Dynamo does not know how to trace itertools.groupby with key type: {str(type(key))}. We only support grouping keys that are constants (int, float, str, etc.)",
|
|
"Hints": [
|
|
"It may be possible to write Dynamo tracing rules for this code. Please report an issue to PyTorch if you encounter this graph break often and it is causing performance issues."
|
|
]
|
|
}
|
|
],
|
|
"GB0153": [
|
|
{
|
|
"Gb_type": "Unsupported key type for nn.Module.__getitem__",
|
|
"Context": "call_method: {self} {name} {args} {kwargs}",
|
|
"Explanation": "Dynamo does not support getitem on `nn.Module` with non-constant key.",
|
|
"Hints": []
|
|
}
|
|
],
|
|
"GB0154": [
|
|
{
|
|
"Gb_type": "Unsupported kwargs for itertools.accumulate",
|
|
"Context": "call_function {self} {args} {kwargs}",
|
|
"Explanation": "Expected kwargs: 'initial', 'func', but got {','.join(set(kwargs.keys()) - {'initial', 'func'})}",
|
|
"Hints": [
|
|
"Dynamo has detected that tracing the code will result in an error when running in eager. Please double check that your code doesn't contain a similar error when actually running eager/uncompiled."
|
|
]
|
|
}
|
|
],
|
|
"GB0155": [
|
|
{
|
|
"Gb_type": "Unsupported kwargs for itertools.groupby",
|
|
"Context": "call_function {self} {args} {kwargs}",
|
|
"Explanation": "Expected kwargs: 'key', but got {','.join(set(kwargs.keys()) - {'key'})}",
|
|
"Hints": [
|
|
"Dynamo has detected that tracing the code will result in an error when running in eager. Please double check that your code doesn't contain a similar error when actually running eager/uncompiled."
|
|
]
|
|
}
|
|
],
|
|
"GB0156": [
|
|
{
|
|
"Gb_type": "Unsupported method call",
|
|
"Context": "call_method {self} {name} {args} {kwargs}",
|
|
"Explanation": "Dynamo does not know how to trace method `{name}` of class `{self.python_type_name()}`",
|
|
"Hints": []
|
|
}
|
|
],
|
|
"GB0157": [
|
|
{
|
|
"Gb_type": "Unsupported ndarray attribute access",
|
|
"Context": "var_getattr {self} {name}",
|
|
"Explanation": "Dynamo currently does not support tracing `ndarray.{name}`.",
|
|
"Hints": []
|
|
}
|
|
],
|
|
"GB0158": [
|
|
{
|
|
"Gb_type": "Unsupported ndarray method call",
|
|
"Context": "call_method {self} {name} {args} {kwargs}",
|
|
"Explanation": "`ndarray.{name}()` is not modelled in `torch._numpy`.",
|
|
"Hints": []
|
|
}
|
|
],
|
|
"GB0159": [
|
|
{
|
|
"Gb_type": "Unsupported ndarray.__version__ access",
|
|
"Context": "var_getattr {self} {name}",
|
|
"Explanation": "Dynamo currently does not support tracing `ndarray.{name}`.",
|
|
"Hints": []
|
|
}
|
|
],
|
|
"GB0160": [
|
|
{
|
|
"Gb_type": "Unsupported next() call",
|
|
"Context": "next({self})",
|
|
"Explanation": "Dynamo does not know how to trace calling `next()` on variable `{self}`.",
|
|
"Hints": [
|
|
"Dynamo has detected that tracing the code will result in an error when running in eager. Please double check that your code doesn't contain a similar error when actually running eager/uncompiled."
|
|
]
|
|
}
|
|
],
|
|
"GB0161": [
|
|
{
|
|
"Gb_type": "Unsupported nn.Module attribute type",
|
|
"Context": "nn.Module subclass: {typestr(base)}, name: {name}, attribute type: {typestr(subobj)}",
|
|
"Explanation": "Dynamo does not support tracing nn.Module attributes of type `{typestr(subobj)}`",
|
|
"Hints": [
|
|
"Refactor your code so that `{name}` (type `{typestr(subobj)}`) is not an attribute of `{typestr(base)}`",
|
|
"Currently supported attribute types are methods, classmethods, staticmethods, ",
|
|
"properties, constants, and tensors.",
|
|
"It may be possible to write Dynamo tracing rules for this code. Please report an issue to PyTorch if you encounter this graph break often and it is causing performance issues."
|
|
]
|
|
}
|
|
],
|
|
"GB0162": [
|
|
{
|
|
"Gb_type": "Unsupported super().__init__() call",
|
|
"Context": "call_method {self} {name} {args} {kwargs}",
|
|
"Explanation": "Dynamo encountered a super().__init__() call on {objvar} that resolved to a `torch.nn.Module.__init__()` call that we cannot trace.",
|
|
"Hints": [
|
|
"This graph break may be difficult to debug. Please report an issue to PyTorch for assistance."
|
|
]
|
|
}
|
|
],
|
|
"GB0163": [
|
|
{
|
|
"Gb_type": "Unsupported tensor subclass attribute access",
|
|
"Context": "{name}",
|
|
"Explanation": "`torch.compile` currently can't trace this",
|
|
"Hints": [
|
|
"Avoid accessing {name} of tensor subclass in torch.compile region",
|
|
"It may be possible to write Dynamo tracing rules for this code. Please report an issue to PyTorch if you encounter this graph break often and it is causing performance issues."
|
|
]
|
|
}
|
|
],
|
|
"GB0164": [
|
|
{
|
|
"Gb_type": "Unsupported tensor subclass overridden attribute access",
|
|
"Context": "{name}",
|
|
"Explanation": "`torch.compile` only support tracing certain types of overridden tensor subclass attributes",
|
|
"Hints": [
|
|
"Avoid accessing {name} of tensor subclass in torch.compile region",
|
|
"Renaming attribute `{name}` of type {self.class_type}",
|
|
"It may be possible to write Dynamo tracing rules for this code. Please report an issue to PyTorch if you encounter this graph break often and it is causing performance issues."
|
|
]
|
|
}
|
|
],
|
|
"GB0165": [
|
|
{
|
|
"Gb_type": "Unsupported torch._C._ImperativeEngine method",
|
|
"Context": "call_method {self} {name}",
|
|
"Explanation": "Dynamo only supports the `queue_callback` method on a torch._C._ImperativeEngine instance, but found: `{name}`.",
|
|
"Hints": []
|
|
}
|
|
],
|
|
"GB0166": [
|
|
{
|
|
"Gb_type": "Unsupported torch._C._ImperativeEngine.queue_callback()",
|
|
"Context": "call_method {self} {name}",
|
|
"Explanation": "queue_callback() is only supported when Compiled Autograd is enabled with fullgraph=True.",
|
|
"Hints": []
|
|
}
|
|
],
|
|
"GB0167": [
|
|
{
|
|
"Gb_type": "Variadic function call with bad args/kwargs type",
|
|
"Context": "args type: {typestr(argsvars)}, kwargs type: {typestr(kwargsvars)}",
|
|
"Explanation": "Expected args to be a list and kwargs to be a dict",
|
|
"Hints": [
|
|
"Dynamo has detected that tracing the code will result in an error when running in eager. Please double check that your code doesn't contain a similar error when actually running eager/uncompiled."
|
|
]
|
|
}
|
|
],
|
|
"GB0168": [
|
|
{
|
|
"Gb_type": "Variadic function call with bad flags",
|
|
"Context": "flags: {inst.argval}",
|
|
"Explanation": "Attempted to call a variadic function (CALL_FUNCTION_EX) with bad flags {inst.argval}",
|
|
"Hints": [
|
|
"This is likely to be a Dynamo bug. Please report an issue to PyTorch."
|
|
]
|
|
}
|
|
],
|
|
"GB0169": [
|
|
{
|
|
"Gb_type": "Write to immutable cell",
|
|
"Context": "cellvar: {cellvar}, value: {value}",
|
|
"Explanation": "Dynamo doesn't support writing to immutable/sourceless cell variables.",
|
|
"Hints": [
|
|
"This graph break may be difficult to debug. Please report an issue to PyTorch for assistance."
|
|
]
|
|
}
|
|
],
|
|
"GB0170": [
|
|
{
|
|
"Gb_type": "Data-dependent branching",
|
|
"Context": "attempted to jump with {value}",
|
|
"Explanation": "_explanation",
|
|
"Hints": [
|
|
"Use `torch.cond` to express dynamic control flow.",
|
|
"This graph break is fundamental - it is unlikely that Dynamo will ever be able to trace through your code. Consider finding a workaround."
|
|
]
|
|
},
|
|
{
|
|
"Gb_type": "Data-dependent branching",
|
|
"Context": "attempted to jump with {value}",
|
|
"Explanation": "_explanation",
|
|
"Hints": []
|
|
},
|
|
{
|
|
"Gb_type": "_gb_type",
|
|
"Context": "attempted to jump with {value}",
|
|
"Explanation": "_explanation",
|
|
"Hints": []
|
|
}
|
|
],
|
|
"GB0171": [
|
|
{
|
|
"Gb_type": "assert with non-string message",
|
|
"Context": "str(args)",
|
|
"Explanation": "Dynamo only supports asserts with string messages",
|
|
"Hints": [
|
|
"It may be possible to write Dynamo tracing rules for this code. Please report an issue to PyTorch if you encounter this graph break often and it is causing performance issues."
|
|
]
|
|
}
|
|
],
|
|
"GB0172": [
|
|
{
|
|
"Gb_type": "async_op=True for distributed collectives",
|
|
"Context": "{self.fn}, args={args}, kwargs={kwargs}",
|
|
"Explanation": "`torch.compile` doesn't support `async_op=True for {self.fn}",
|
|
"Hints": [
|
|
"It may be possible to write Dynamo tracing rules for this code. Please report an issue to PyTorch if you encounter this graph break often and it is causing performance issues."
|
|
]
|
|
}
|
|
],
|
|
"GB0173": [
|
|
{
|
|
"Gb_type": "backward_state does not support export",
|
|
"Context": "",
|
|
"Explanation": "Compiled autograd doesn't work with `torch.export`.",
|
|
"Hints": []
|
|
}
|
|
],
|
|
"GB0174": [
|
|
{
|
|
"Gb_type": "bad args to builtin cast()",
|
|
"Context": "got args {args} {kwargs}",
|
|
"Explanation": "Dynamo expects exactly 2 args to builtin cast().",
|
|
"Hints": [
|
|
"Ensure your call to cast() has exactly 2 arguments."
|
|
]
|
|
}
|
|
],
|
|
"GB0175": [
|
|
{
|
|
"Gb_type": "builtin isinstance() cannot determine type of argument",
|
|
"Context": "isinstance({arg}, {isinstance_type})",
|
|
"Explanation": "Dynamo doesn't have a rule to determine the type of argument {arg}",
|
|
"Hints": [
|
|
"This is likely to be a Dynamo bug. Please report an issue to PyTorch."
|
|
]
|
|
}
|
|
],
|
|
"GB0176": [
|
|
{
|
|
"Gb_type": "call_id() without associated real value",
|
|
"Context": "call_id {self}",
|
|
"Explanation": "Dynamo could not find an associated real value for the tensor.",
|
|
"Hints": []
|
|
}
|
|
],
|
|
"GB0177": [
|
|
{
|
|
"Gb_type": "can't handle functions not implemented in python ",
|
|
"Context": "{fn}",
|
|
"Explanation": "Dynamo can only handle functions defined in python",
|
|
"Hints": [
|
|
"Move usage of this function out of `torch.compile` region",
|
|
"Avoid using `tensor.is_inference()` and `torch.is_inference_mode_enabled()` in your compile code. This is primarily used in conjunction with `torch.inference_mode`. Consider using `torch.no_grad` instead because `torch.no_grad` leads to same improvements as `inference_mode` when `torch.compile` is used."
|
|
]
|
|
}
|
|
],
|
|
"GB0178": [
|
|
{
|
|
"Gb_type": "constant fold exception",
|
|
"Context": "attempted to run function {fn} with arguments {args}",
|
|
"Explanation": "Encountered exception when attempting to constant fold.",
|
|
"Hints": [
|
|
"This is likely to be a Dynamo bug. Please report an issue to PyTorch."
|
|
]
|
|
}
|
|
],
|
|
"GB0179": [
|
|
{
|
|
"Gb_type": "copy.deepcopy()",
|
|
"Context": "copy.deepcopy({x})",
|
|
"Explanation": "Dynamo does not support copy.deepcopy()",
|
|
"Hints": [
|
|
"Avoid calling copy.deepcopy()",
|
|
"It may be possible to write Dynamo tracing rules for this code. Please report an issue to PyTorch if you encounter this graph break often and it is causing performance issues."
|
|
]
|
|
}
|
|
],
|
|
"GB0180": [
|
|
{
|
|
"Gb_type": "dataclass fields failure",
|
|
"Context": "obj: {obj}; variable type: {type(obj)}",
|
|
"Explanation": "Dataclass fields handling fails for {obj}. Expected it to be a user-defined object.",
|
|
"Hints": []
|
|
}
|
|
],
|
|
"GB0181": [
|
|
{
|
|
"Gb_type": "dtype mismatch between tensor and its gradient",
|
|
"Context": "tensor dtype: {value.dtype}; grad dtype: {safe_grad(value).dtype}",
|
|
"Explanation": "Inconsistent dtype between tensor and its gradient. This can happen in FSDP and crashes meta tensor creation.",
|
|
"Hints": [
|
|
"It may be possible to write Dynamo tracing rules for this code. Please report an issue to PyTorch if you encounter this graph break often and it is causing performance issues."
|
|
]
|
|
}
|
|
],
|
|
"GB0182": [
|
|
{
|
|
"Gb_type": "failed to broadcast when attempting Tensor comparison op",
|
|
"Context": "{op.__name__}({left}, {right})",
|
|
"Explanation": "Dynamo was unable to broad cast the arguments {left}, {right} when attempting to trace the comparison op {op.__name__}.",
|
|
"Hints": [
|
|
"Dynamo has detected that tracing the code will result in an error when running in eager. Please double check that your code doesn't contain a similar error when actually running eager/uncompiled."
|
|
]
|
|
}
|
|
],
|
|
"GB0183": [
|
|
{
|
|
"Gb_type": "failed to call dict.fromkeys()",
|
|
"Context": "{user_cls.__name__}.fromkeys(): {args} {kwargs}",
|
|
"Explanation": "Failed to call {user_cls.__name__}.fromkeys() because arguments could not be automatically converted to a list, or some dict key is not hashable.",
|
|
"Hints": [
|
|
"Manually convert the argument to a list.",
|
|
"Ensure all keys are hashable."
|
|
]
|
|
}
|
|
],
|
|
"GB0184": [
|
|
{
|
|
"Gb_type": "failed to call str() on user defined object",
|
|
"Context": "str(arg)",
|
|
"Explanation": "User defined object has no __str__ or __repr__ method",
|
|
"Hints": [
|
|
"Dynamo has detected that tracing the code will result in an error when running in eager. Please double check that your code doesn't contain a similar error when actually running eager/uncompiled."
|
|
]
|
|
}
|
|
],
|
|
"GB0185": [
|
|
{
|
|
"Gb_type": "failed to convert numpy.ndarray to Tensor",
|
|
"Context": "str(value)",
|
|
"Explanation": "Exception encountered when attempting to convert numpy.ndarray to Tensor",
|
|
"Hints": []
|
|
}
|
|
],
|
|
"GB0186": [
|
|
{
|
|
"Gb_type": "functools.partial() with non-literal keyword",
|
|
"Context": "non-literal keyword: {k}",
|
|
"Explanation": "functools.partial() expects literal/string keywords",
|
|
"Hints": [
|
|
"Dynamo has detected that tracing the code will result in an error when running in eager. Please double check that your code doesn't contain a similar error when actually running eager/uncompiled."
|
|
]
|
|
}
|
|
],
|
|
"GB0187": [
|
|
{
|
|
"Gb_type": "functools.wraps",
|
|
"Context": "{fn}",
|
|
"Explanation": "`torch.compile` can't trace `functools.wraps` on functions defined outside the compile region",
|
|
"Hints": [
|
|
"It may be possible to write Dynamo tracing rules for this code. Please report an issue to PyTorch if you encounter this graph break often and it is causing performance issues."
|
|
]
|
|
}
|
|
],
|
|
"GB0188": [
|
|
{
|
|
"Gb_type": "getattr with no source",
|
|
"Context": "var_getattr {self} {name}",
|
|
"Explanation": "Dynamo does not know how to access an attribute on an `nn.Module` instance that lacks a source. This is usually an internal error in Dynamo.",
|
|
"Hints": [
|
|
"This is likely to be a Dynamo bug. Please report an issue to PyTorch."
|
|
]
|
|
}
|
|
],
|
|
"GB0189": [
|
|
{
|
|
"Gb_type": "getattr() on nn.Module with pending mutation",
|
|
"Context": "getattr({obj}, {name}, {default})",
|
|
"Explanation": "Intentionally graph breaking on getattr() on a nn.Module with a pending mutation",
|
|
"Hints": []
|
|
}
|
|
],
|
|
"GB0190": [
|
|
{
|
|
"Gb_type": "getattr() with non-constant name argument",
|
|
"Context": "getattr({obj}, {name_var}, {default})",
|
|
"Explanation": "getattr() with non-constant name argument is not supported",
|
|
"Hints": [
|
|
"Ensure the name argument of getattr() is a string"
|
|
]
|
|
}
|
|
],
|
|
"GB0191": [
|
|
{
|
|
"Gb_type": "id() with unsupported args",
|
|
"Context": "str(args)",
|
|
"Explanation": "Dynamo doesn't know how to trace id() call with args {args}",
|
|
"Hints": [
|
|
"Supported args are Tensors, and functions/nn.Modules/user-defined objects ",
|
|
"from outside the compiled region.",
|
|
"It may be possible to write Dynamo tracing rules for this code. Please report an issue to PyTorch if you encounter this graph break often and it is causing performance issues."
|
|
]
|
|
}
|
|
],
|
|
"GB0192": [
|
|
{
|
|
"Gb_type": "input iterator to itertools.cycle has too many items",
|
|
"Context": "next({self})",
|
|
"Explanation": "Has reached internal Dynamo max iterator limit: {MAX_ITERATOR_LIMIT}",
|
|
"Hints": []
|
|
}
|
|
],
|
|
"GB0193": [
|
|
{
|
|
"Gb_type": "invalid call to builtin op handler",
|
|
"Context": "invalid args to {self_handler}: {args} {kwargs}",
|
|
"Explanation": "Encountered TypeError when trying to handle op {fn.__name__}",
|
|
"Hints": [
|
|
"This graph break may be difficult to debug. Please report an issue to PyTorch for assistance."
|
|
]
|
|
}
|
|
],
|
|
"GB0194": [
|
|
{
|
|
"Gb_type": "isinstance() called on user defined object with C extensions",
|
|
"Context": "isinstance({arg}, {isinstance_type})",
|
|
"Explanation": "User-defined object with C extensions can have torch.Tensor attributes; intentionally graph breaking.",
|
|
"Hints": [
|
|
"It may be possible to write Dynamo tracing rules for this code. Please report an issue to PyTorch if you encounter this graph break often and it is causing performance issues."
|
|
]
|
|
}
|
|
],
|
|
"GB0195": [
|
|
{
|
|
"Gb_type": "issubclass() with non-constant arguments",
|
|
"Context": "issubclass({left_ty}, {right_ty})",
|
|
"Explanation": "issubclass() with non-constant arguments not supported.",
|
|
"Hints": [
|
|
"Make sure your arguments are types.",
|
|
"Dynamo has detected that tracing the code will result in an error when running in eager. Please double check that your code doesn't contain a similar error when actually running eager/uncompiled."
|
|
]
|
|
}
|
|
],
|
|
"GB0196": [
|
|
{
|
|
"Gb_type": "key not found in dict",
|
|
"Context": "Key {arg.value}",
|
|
"Explanation": "msg",
|
|
"Hints": [
|
|
"Check if the key exists in the dictionary before accessing it.",
|
|
"Dynamo has detected that tracing the code will result in an error when running in eager. Please double check that your code doesn't contain a similar error when actually running eager/uncompiled."
|
|
]
|
|
}
|
|
],
|
|
"GB0197": [
|
|
{
|
|
"Gb_type": "list elements are pointing to the list itself",
|
|
"Context": "",
|
|
"Explanation": "Dynamo does not support lists whose items reference to itself",
|
|
"Hints": [
|
|
"Avoid using self referential list"
|
|
]
|
|
}
|
|
],
|
|
"GB0198": [
|
|
{
|
|
"Gb_type": "mapping proxy affected by dictionary mutation",
|
|
"Context": "Source: {self.source}, Dict mutation detected",
|
|
"Explanation": "msg",
|
|
"Hints": [
|
|
"Avoid modifying dictionaries that might be referenced by mapping proxy objects",
|
|
"Or avoid using the mapping proxy objects after modifying its underlying dictionary"
|
|
]
|
|
}
|
|
],
|
|
"GB0199": [
|
|
{
|
|
"Gb_type": "mapping proxy cannot be reconstructed",
|
|
"Context": "Source: {self.source}",
|
|
"Explanation": "msg",
|
|
"Hints": [
|
|
"Use a mapping proxy constructed in the same `torch.compile` region.",
|
|
"It may be possible to write Dynamo tracing rules for this code. Please report an issue to PyTorch if you encounter this graph break often and it is causing performance issues."
|
|
]
|
|
}
|
|
],
|
|
"GB0200": [
|
|
{
|
|
"Gb_type": "missing BUILD_SET handler",
|
|
"Context": "",
|
|
"Explanation": "Missing BUILD_SET bytecode handler (for testing purposes).",
|
|
"Hints": []
|
|
}
|
|
],
|
|
"GB0201": [
|
|
{
|
|
"Gb_type": "namedtuple construction",
|
|
"Context": "args={args}, kwargs={kwargs}",
|
|
"Explanation": "`torch.compile` only support certain input types for namedtuple",
|
|
"Hints": [
|
|
"It may be possible to write Dynamo tracing rules for this code. Please report an issue to PyTorch if you encounter this graph break often and it is causing performance issues."
|
|
]
|
|
}
|
|
],
|
|
"GB0202": [
|
|
{
|
|
"Gb_type": "non-const argument in nn.Module method",
|
|
"Context": "call_method: {self} {name} {args} {kwargs}",
|
|
"Explanation": "Dynamo does not support calling method `{name}` of ``nn.Module`` {module} with non-constant arguments.",
|
|
"Hints": []
|
|
}
|
|
],
|
|
"GB0203": [
|
|
{
|
|
"Gb_type": "non-const keys in dict_keys",
|
|
"Context": "non-const keys: {[k for k in value if not ConstantVariable.is_literal(k)]}",
|
|
"Explanation": "Dynamo expects dict_keys keys to be constants.",
|
|
"Hints": [
|
|
"Ensure your dict_keys keys are constants (e.g. int, float, strings)"
|
|
]
|
|
}
|
|
],
|
|
"GB0204": [
|
|
{
|
|
"Gb_type": "non-const keys in mappingproxy",
|
|
"Context": "non-const keys: {[k for k in value.keys() if not ConstantVariable.is_literal(k)]}",
|
|
"Explanation": "Dynamo expects mappingproxy keys to be constants.",
|
|
"Hints": [
|
|
"Ensure your mappingproxy keys are constants (e.g. int, float, strings)"
|
|
]
|
|
}
|
|
],
|
|
"GB0205": [
|
|
{
|
|
"Gb_type": "proxy not set",
|
|
"Context": "as_proxy {self}",
|
|
"Explanation": "Dynamo requires the autograd.Function context to be initialized with a proxy.",
|
|
"Hints": [
|
|
"This is likely to be a Dynamo bug. Please report an issue to PyTorch."
|
|
]
|
|
}
|
|
],
|
|
"GB0206": [
|
|
{
|
|
"Gb_type": "setattr() on Tensor.requires_grad",
|
|
"Context": "setattr({obj}, {name}, {val})",
|
|
"Explanation": "setattr() on Tensor.requires_grad not supported. Mutating requires_grad can introduce a new leaf from non-leaf or vice versa in the middle of the graph, which AOTAutograd does not currently know how to handle.",
|
|
"Hints": [
|
|
"It may be possible to write Dynamo tracing rules for this code. Please report an issue to PyTorch if you encounter this graph break often and it is causing performance issues."
|
|
]
|
|
}
|
|
],
|
|
"GB0207": [
|
|
{
|
|
"Gb_type": "sort with non-constant keys",
|
|
"Context": "str(first_non_constant_key)",
|
|
"Explanation": "Cannot perform sort with non-constant key. First non-constant key type: {python_type}. Most notably, we cannot sort with Tensor or SymInt keys, but we can sort ints.",
|
|
"Hints": [
|
|
"Use something else as the key."
|
|
]
|
|
}
|
|
],
|
|
"GB0208": [
|
|
{
|
|
"Gb_type": "torch.* op returned non-Tensor",
|
|
"Context": "example_value type: {typestr(example_value)}; op: {proxy.node.op}; target: {proxy.node.target}",
|
|
"Explanation": "torch.* ops that return a non-Tensor cannot be traced into the Dynamo FX graph output",
|
|
"Hints": []
|
|
}
|
|
],
|
|
"GB0209": [
|
|
{
|
|
"Gb_type": "torch.autograd._unsafe_preserve_version_counter escaped from compiled region",
|
|
"Context": "str(self)",
|
|
"Explanation": "Dynamo doesn't support compiling a region that returns a torch.autograd._unsafe_preserve_version_counter context manager.",
|
|
"Hints": [
|
|
"It may be possible to write Dynamo tracing rules for this code. Please report an issue to PyTorch if you encounter this graph break often and it is causing performance issues."
|
|
]
|
|
}
|
|
],
|
|
"GB0210": [
|
|
{
|
|
"Gb_type": "torch.distributed package is not available!",
|
|
"Context": "",
|
|
"Explanation": "The PyTorch package doesn't include torch.distributed when building from source.",
|
|
"Hints": [
|
|
"Set USE_DISTRIBUTED=1 to enable it when building PyTorch from source."
|
|
]
|
|
}
|
|
],
|
|
"GB0211": [
|
|
{
|
|
"Gb_type": "torch.nn.Module with a non-function custom __getattr__",
|
|
"Context": "var_getattr {self} {name}",
|
|
"Explanation": "Dynamo detected a nn.Module object with a custom `__getattr__` method, but this method is not a standard Python function (e.g., it might be implemented in C/C++). Dynamo cannot currently trace into such non-standard `__getattr__` methods.",
|
|
"Hints": [
|
|
"Avoid using objects with non-standard __getattr__ methods ",
|
|
"within the compiled region. If possible, implement ",
|
|
"__getattr__ as a standard Python function.",
|
|
"It may be possible to write Dynamo tracing rules for this code. Please report an issue to PyTorch if you encounter this graph break often and it is causing performance issues."
|
|
]
|
|
}
|
|
],
|
|
"GB0212": [
|
|
{
|
|
"Gb_type": "torch.profiler object escaped from compiled region",
|
|
"Context": "str(self)",
|
|
"Explanation": "Dynamo doesn't support compiling a region that returns a torch.profiler context manager.",
|
|
"Hints": [
|
|
"It may be possible to write Dynamo tracing rules for this code. Please report an issue to PyTorch if you encounter this graph break often and it is causing performance issues."
|
|
]
|
|
}
|
|
],
|
|
"GB0213": [
|
|
{
|
|
"Gb_type": "unimplemented builtin op on tensor arguments",
|
|
"Context": "partial tensor op: {self} {args} {kwargs}",
|
|
"Explanation": "Dynamo does not know how to trace builtin operator {self.fn} with tensor arguments",
|
|
"Hints": [
|
|
"It may be possible to write Dynamo tracing rules for this code. Please report an issue to PyTorch if you encounter this graph break often and it is causing performance issues."
|
|
]
|
|
}
|
|
],
|
|
"GB0214": [
|
|
{
|
|
"Gb_type": "unsupported SymNode comparison op",
|
|
"Context": "{op.__name__}({left}, {right})",
|
|
"Explanation": "Dynamo does not support the comparison op {op.__name__} with SymNode arguments {left}, {right}",
|
|
"Hints": [
|
|
"It may be possible to write Dynamo tracing rules for this code. Please report an issue to PyTorch if you encounter this graph break often and it is causing performance issues."
|
|
]
|
|
}
|
|
],
|
|
"GB0215": [
|
|
{
|
|
"Gb_type": "unsupported Tensor comparison op",
|
|
"Context": "{op.__name__}({left}, {right})",
|
|
"Explanation": "Dynamo does not support the comparison op {op.__name__} with Tensor arguments {left}, {right}",
|
|
"Hints": [
|
|
"It may be possible to write Dynamo tracing rules for this code. Please report an issue to PyTorch if you encounter this graph break often and it is causing performance issues."
|
|
]
|
|
}
|
|
],
|
|
"GB0216": [
|
|
{
|
|
"Gb_type": "unsupported grid type for triton hop check_grid",
|
|
"Context": "grid type = {type(grid)}",
|
|
"Explanation": "`torch.compile` only supports list-like grid for check_grid",
|
|
"Hints": [
|
|
"It may be possible to write Dynamo tracing rules for this code. Please report an issue to PyTorch if you encounter this graph break often and it is causing performance issues."
|
|
]
|
|
}
|
|
],
|
|
"GB0217": [
|
|
{
|
|
"Gb_type": "unsupported hasattr operation",
|
|
"Context": "Class {self.user_cls}",
|
|
"Explanation": "msg",
|
|
"Hints": [
|
|
"Consider using a regular dictionary instead",
|
|
"It may be possible to write Dynamo tracing rules for this code. Please report an issue to PyTorch if you encounter this graph break often and it is causing performance issues."
|
|
]
|
|
}
|
|
],
|
|
"GB0218": [
|
|
{
|
|
"Gb_type": "unsupported index(Tensor)",
|
|
"Context": "",
|
|
"Explanation": "Dynamo does not support tracing builtin index() on a Tensor",
|
|
"Hints": []
|
|
}
|
|
],
|
|
"GB0219": [
|
|
{
|
|
"Gb_type": "Backend compiler exception",
|
|
"Context": "Backend: {name}\nException:{str(e)}\nTraceback:\n{self.root_tx.format_frame_summary()}",
|
|
"Explanation": "Backend compiler `{name}` failed with {str(e)}. Adding a graph break.",
|
|
"Hints": [
|
|
"Report an issue to the backend compiler repo."
|
|
]
|
|
}
|
|
],
|
|
"GB0220": [
|
|
{
|
|
"Gb_type": "Failed to mutate tensor data attribute to different dtype",
|
|
"Context": "setattr({obj}, {name}, {val})",
|
|
"Explanation": "Dyanmo only supports mutating `.data` of tensor to a new one with the same dtype",
|
|
"Hints": [
|
|
"Don't mutate `.data` on this tensor, or move ",
|
|
"the mutation out of `torch.compile` region"
|
|
]
|
|
}
|
|
],
|
|
"GB0221": [
|
|
{
|
|
"Gb_type": "non-generator contextlib.contextmanager",
|
|
"Context": "str(self.vt.get_code())",
|
|
"Explanation": "Cannot compile function decorated with `@contextlib.contextmanager` that is not a generator, i.e. does not use `yield`",
|
|
"Hints": [
|
|
"Use `yield` in the function body instead of `return`.",
|
|
"Remove the `@contextlib.contextmanager` decorator."
|
|
]
|
|
}
|
|
],
|
|
"GB0222": [
|
|
{
|
|
"Gb_type": "Attempted to wrap a set with tensors",
|
|
"Context": "Python set containing torch.Tensor elements",
|
|
"Explanation": "Dynamo cannot trace sets of tensors. To get a stable ordering, Dynamo needs to convert the set into a list and the order might not be stable if the set contains tensors.",
|
|
"Hints": [
|
|
"Use a dictionary where the keys are tensors.",
|
|
"It may be possible to write Dynamo tracing rules for this code. Please report an issue to PyTorch if you encounter this graph break often and it is causing performance issues."
|
|
]
|
|
}
|
|
],
|
|
"GB0223": [
|
|
{
|
|
"Gb_type": "torch.compile call with > 1 args",
|
|
"Context": "args={args}, kwargs={kwargs}",
|
|
"Explanation": "Attempted to call `torch.compile` with > 1 args. Dynamo does not support this.",
|
|
"Hints": [
|
|
"Remove the torch.compile call or its additional args.",
|
|
"It may be possible to write Dynamo tracing rules for this code. Please report an issue to PyTorch if you encounter this graph break often and it is causing performance issues."
|
|
]
|
|
}
|
|
],
|
|
"GB0224": [
|
|
{
|
|
"Gb_type": "Attempted to call torch in-graph function on only torch.SymInt arguments",
|
|
"Context": "fn={self.value}, args={args}, kwargs={kwargs}",
|
|
"Explanation": "Attempted to call {str(self.value)} (that should be put in the FX graph) on only torch.SymInt arguments. Dynamo does not support this.",
|
|
"Hints": [
|
|
"It may be possible to write Dynamo tracing rules for this code. Please report an issue to PyTorch if you encounter this graph break often and it is causing performance issues."
|
|
]
|
|
}
|
|
],
|
|
"GB0225": [
|
|
{
|
|
"Gb_type": "Attempted to use tensor creation function with requires_grad=True",
|
|
"Context": "fn={self.value}, args={args}, kwargs={kwargs}",
|
|
"Explanation": "Dynamo does not support this.",
|
|
"Hints": [
|
|
"Create the tensor outside the compiled region.",
|
|
"Do not set `requires_grad=True`.",
|
|
"It may be possible to write Dynamo tracing rules for this code. Please report an issue to PyTorch if you encounter this graph break often and it is causing performance issues."
|
|
]
|
|
}
|
|
],
|
|
"GB0226": [
|
|
{
|
|
"Gb_type": "`torch.nn.Parameter()` with unsupported data type",
|
|
"Context": "data={data}",
|
|
"Explanation": "Called `torch.nn.Parameter()` with non-Tensor argument.",
|
|
"Hints": [
|
|
"Ensure the argument to `torch.nn.Parameter()` is a `torch.Tensor`.",
|
|
"Dynamo has detected that tracing the code will result in an error when running in eager. Please double check that your code doesn't contain a similar error when actually running eager/uncompiled."
|
|
]
|
|
}
|
|
],
|
|
"GB0227": [
|
|
{
|
|
"Gb_type": "Attempted to use torch.nn.Parameter constructor with tensor subclass",
|
|
"Context": "str(data)",
|
|
"Explanation": "Dynamo does not support this.",
|
|
"Hints": [
|
|
"It may be possible to write Dynamo tracing rules for this code. Please report an issue to PyTorch if you encounter this graph break often and it is causing performance issues."
|
|
]
|
|
}
|
|
],
|
|
"GB0228": [
|
|
{
|
|
"Gb_type": "`torch.nn.Parameter`: cannot convert to traceable tracable",
|
|
"Context": "",
|
|
"Explanation": "convert_tracable_parameter is set to False.",
|
|
"Hints": [
|
|
"Check usage of context manager: do_not_convert_to_tracable_parameter",
|
|
"This graph break may be difficult to debug. Please report an issue to PyTorch for assistance."
|
|
]
|
|
}
|
|
],
|
|
"GB0229": [
|
|
{
|
|
"Gb_type": "Unexpected type of data placeholder op for parameter construction",
|
|
"Context": "data_node.op={data_node.op}",
|
|
"Explanation": "Data node op should be placeholder or get_attr.",
|
|
"Hints": [
|
|
"This graph break may be difficult to debug. Please report an issue to PyTorch for assistance."
|
|
]
|
|
}
|
|
],
|
|
"GB0230": [
|
|
{
|
|
"Gb_type": "Attempted to use torch.use_deterministic_algorithms(warn_only=True)",
|
|
"Context": "mode={mode}, warn_only={warn_only}",
|
|
"Explanation": "Dynamo does not support this.",
|
|
"Hints": [
|
|
"Remove param warn_only in function call torch.use_deterministic_algorithms.",
|
|
"It may be possible to write Dynamo tracing rules for this code. Please report an issue to PyTorch if you encounter this graph break often and it is causing performance issues."
|
|
]
|
|
}
|
|
],
|
|
"GB0231": [
|
|
{
|
|
"Gb_type": "call `torch.from_numpy` with `torch._dynamo.config.trace_numpy=False`",
|
|
"Context": "trace_numpy={config.trace_numpy}",
|
|
"Explanation": "Attempted to call `torch.from_numpy` with config `torch._dynamo.config.trace_numpy` set to `False`.",
|
|
"Hints": [
|
|
"Change `torch._dynamo.config.trace_numpy` to `True`."
|
|
]
|
|
}
|
|
],
|
|
"GB0232": [
|
|
{
|
|
"Gb_type": "`torch.from_numpy` with NumPy unavailable",
|
|
"Context": "",
|
|
"Explanation": "Attempted to call `torch.numpy` but NumPy could not be imported.",
|
|
"Hints": [
|
|
"Check NumPy version and installation in your environment.",
|
|
"Dynamo has detected that tracing the code will result in an error when running in eager. Please double check that your code doesn't contain a similar error when actually running eager/uncompiled."
|
|
]
|
|
}
|
|
],
|
|
"GB0233": [
|
|
{
|
|
"Gb_type": "Attempted to use strided NestedTensor",
|
|
"Context": "layout={layout}",
|
|
"Explanation": "Dynamo does not support this.",
|
|
"Hints": [
|
|
"Change layout=torch.jagged.",
|
|
"It may be possible to write Dynamo tracing rules for this code. Please report an issue to PyTorch if you encounter this graph break often and it is causing performance issues."
|
|
]
|
|
}
|
|
],
|
|
"GB0234": [
|
|
{
|
|
"Gb_type": "Attempted to pop from empty torch function mode stack",
|
|
"Context": "",
|
|
"Explanation": "Called `torch._C._pop_torch_function_stack` when torch function mode stack is empty.",
|
|
"Hints": [
|
|
"Do not pop from empty torch function mode stack.",
|
|
"Dynamo has detected that tracing the code will result in an error when running in eager. Please double check that your code doesn't contain a similar error when actually running eager/uncompiled."
|
|
]
|
|
}
|
|
],
|
|
"GB0235": [
|
|
{
|
|
"Gb_type": "`torch.nn.Parameter` with non-constant Tensor attributes",
|
|
"Context": "data={data}",
|
|
"Explanation": "Dynamo does not support this.",
|
|
"Hints": [
|
|
"Ensure the Tensor argument's shape, dtype, and device are correct.",
|
|
"Dynamo has detected that tracing the code will result in an error when running in eager. Please double check that your code doesn't contain a similar error when actually running eager/uncompiled."
|
|
]
|
|
}
|
|
],
|
|
"GB0236": [
|
|
{
|
|
"Gb_type": "Invalid input type for nonstrict_trace-ed function",
|
|
"Context": "Encountered input of type <{type_name}>.",
|
|
"Explanation": "For `nonstrict_trace`-ed functions, only basic types (e.g., torch.Tensor, int, float) or pytree containers of those are allowed as inputs. The provided argument contains an unsupported type.",
|
|
"Hints": [
|
|
"Use one of the following to register the type with pytree:\n",
|
|
"* `torch.utils._pytree.register_constant`\n",
|
|
"* `torch.utils._pytree.register_dataclass`\n",
|
|
"* `torch.utils._pytree.register_pytree_node`"
|
|
]
|
|
}
|
|
],
|
|
"GB0237": [
|
|
{
|
|
"Gb_type": "non-constant `requires_grad` argument to `torch.nn.Parameter`",
|
|
"Context": "requires_grad={requires_grad}",
|
|
"Explanation": "Dynamo does not support this.",
|
|
"Hints": [
|
|
"Change `requires_grad` to be a bool.",
|
|
"Dynamo has detected that tracing the code will result in an error when running in eager. Please double check that your code doesn't contain a similar error when actually running eager/uncompiled."
|
|
]
|
|
}
|
|
],
|
|
"GB0238": [
|
|
{
|
|
"Gb_type": "Input marked with `pytree.register_constant` constructed in the `torch.compile` region",
|
|
"Context": "Input={input_spec_vt}, offending type <{type_name}>.",
|
|
"Explanation": "Calling a `nonstrict_trace`-ed function with an input that contains an object of type <{type_name}>, which was marked with `pytree.register_constant`. However, the object was constructed _inside_ the `torch.compile` region. This is not supported.",
|
|
"Hints": [
|
|
"Construct the object _outside_ the `torch.compile` region, or submit an issue to GitHub.",
|
|
"It may be possible to write Dynamo tracing rules for this code. Please report an issue to PyTorch if you encounter this graph break often and it is causing performance issues."
|
|
]
|
|
}
|
|
],
|
|
"GB0239": [
|
|
{
|
|
"Gb_type": "Invalid use of pytree_flatten with nonstrict_trace-ed function",
|
|
"Context": "Input={input_spec_vt}, offending type <{type_name}>.",
|
|
"Explanation": "Calling a `nonstrict_trace`-ed function where one of the inputs has been registered with a `pytree_flatten` that places an object of type <{type_name}> into the context.",
|
|
"Hints": [
|
|
"Modifying the `pytree_flatten` to avoid placing the object into the context.",
|
|
"Apply one of the following to <{type_name}>:\n",
|
|
"* `torch.utils._pytree.register_constant`\n",
|
|
"* `torch.utils._pytree.register_dataclass`\n",
|
|
"* `torch.utils._pytree.register_pytree_node`",
|
|
"It may be possible to write Dynamo tracing rules for this code. Please report an issue to PyTorch if you encounter this graph break often and it is causing performance issues."
|
|
]
|
|
}
|
|
],
|
|
"GB0240": [
|
|
{
|
|
"Gb_type": "Shape mismatch with out= list of tensor variants",
|
|
"Context": "fn={self.value}, args={args}, kwargs={kwargs}",
|
|
"Explanation": "Shape mismatch when calling {self.value} with `out=`. Provided `out=` shape: {saved_out_shape}. Actual shape: {fake_out.shape}.",
|
|
"Hints": [
|
|
"It may be possible to write Dynamo tracing rules for this code. Please report an issue to PyTorch if you encounter this graph break often and it is causing performance issues."
|
|
]
|
|
}
|
|
],
|
|
"GB0241": [
|
|
{
|
|
"Gb_type": "Attempted to call op with non-contiguous `out=` list of tensors",
|
|
"Context": "self.value={self.value}, args={args}, kwargs={kwargs}",
|
|
"Explanation": "Dynamo does not support this.",
|
|
"Hints": [
|
|
"It may be possible to write Dynamo tracing rules for this code. Please report an issue to PyTorch if you encounter this graph break often and it is causing performance issues."
|
|
]
|
|
}
|
|
],
|
|
"GB0242": [
|
|
{
|
|
"Gb_type": "Attempted to call op with non-contiguous `out=` tensor",
|
|
"Context": "self.value={self.value}, args={args}, kwargs={kwargs}",
|
|
"Explanation": "Dynamo does not support this.",
|
|
"Hints": [
|
|
"It may be possible to write Dynamo tracing rules for this code. Please report an issue to PyTorch if you encounter this graph break often and it is causing performance issues."
|
|
]
|
|
}
|
|
],
|
|
"GB0243": [
|
|
{
|
|
"Gb_type": "Attempted to use `torch.nn.modules.utils._ntuple` with unsupported argument type",
|
|
"Context": "value={value}",
|
|
"Explanation": "Dynamo does not support this.",
|
|
"Hints": [
|
|
"Change use of _ntuple with argument as constant or tensor."
|
|
]
|
|
}
|
|
],
|
|
"GB0244": [
|
|
{
|
|
"Gb_type": "Attempted to use `torch.nn.Parameter()` with export",
|
|
"Context": "",
|
|
"Explanation": "Dynamo does not support this.",
|
|
"Hints": [
|
|
"Do not use `torch.nn.Parameter()` with export.",
|
|
"It may be possible to write Dynamo tracing rules for this code. Please report an issue to PyTorch if you encounter this graph break often and it is causing performance issues."
|
|
]
|
|
}
|
|
],
|
|
"GB0245": [
|
|
{
|
|
"Gb_type": "Attempted to use `nested_tensor` with non-list input",
|
|
"Context": "tensor_list={tensor_list}",
|
|
"Explanation": "Dynamo does not support this.",
|
|
"Hints": [
|
|
"Change `nested_tensor` with list input.",
|
|
"Dynamo has detected that tracing the code will result in an error when running in eager. Please double check that your code doesn't contain a similar error when actually running eager/uncompiled."
|
|
]
|
|
}
|
|
],
|
|
"GB0246": [
|
|
{
|
|
"Gb_type": "Attempted to use `torch.nn.functional.one_hot` with data-dependent output shape",
|
|
"Context": "args={args}, kwargs={kwargs}",
|
|
"Explanation": "Dynamo does not support this.",
|
|
"Hints": [
|
|
"Explicitly set the `num_classes` param of the function call ",
|
|
"`torch.nn.functional.one_hot` to something other than -1."
|
|
]
|
|
}
|
|
],
|
|
"GB0247": [
|
|
{
|
|
"Gb_type": "Shape mismatch with out= tensor variant",
|
|
"Context": "fn={self.value}, args={args}, kwargs={kwargs}",
|
|
"Explanation": "Shape mismatch when calling {self.value} with `out=`. Provided `out=` shape: {saved_out_shapes}. Actual shape: {fake_out.shape}.",
|
|
"Hints": [
|
|
"It may be possible to write Dynamo tracing rules for this code. Please report an issue to PyTorch if you encounter this graph break often and it is causing performance issues."
|
|
]
|
|
}
|
|
],
|
|
"GB0248": [
|
|
{
|
|
"Gb_type": "improper torch.get_device_module arguments",
|
|
"Context": "args={args}, kwargs={kwargs}",
|
|
"Explanation": "torch.get_device_module accepts 1 optional argument `device`",
|
|
"Hints": [
|
|
"Dynamo has detected that tracing the code will result in an error when running in eager. Please double check that your code doesn't contain a similar error when actually running eager/uncompiled."
|
|
]
|
|
}
|
|
],
|
|
"GB0249": [
|
|
{
|
|
"Gb_type": "bad device argument to torch.get_device_module",
|
|
"Context": "args={args}, kwargs={kwargs}",
|
|
"Explanation": "Expected valid string/torch.device argument ('cpu', 'cuda', etc.)",
|
|
"Hints": [
|
|
"Dynamo has detected that tracing the code will result in an error when running in eager. Please double check that your code doesn't contain a similar error when actually running eager/uncompiled."
|
|
]
|
|
}
|
|
],
|
|
"GB0250": [
|
|
{
|
|
"Gb_type": "ndarray.astype(object)",
|
|
"Context": "call_method {self} {name} {args} {kwargs}",
|
|
"Explanation": "`ndarray.astype('O')` or `ndarray.astype(object)` is not supported by torch.compile, as there is no equivalent to object type in torch.Tensor. This will be executed eagerly.",
|
|
"Hints": [
|
|
"This graph break is fundamental - it is unlikely that Dynamo will ever be able to trace through your code. Consider finding a workaround."
|
|
]
|
|
}
|
|
],
|
|
"GB0251": [
|
|
{
|
|
"Gb_type": "Unsupported output type for nonstrict_trace-ed function",
|
|
"Context": "Function: {fn.__name__}",
|
|
"Explanation": "For `nonstrict_trace`-ed functions, only basic types (e.g., torch.Tensor, int, list) are allowed as output. The result of this call contains an unsupported type.",
|
|
"Hints": [
|
|
"It may be possible to write Dynamo tracing rules for this code. Please report an issue to PyTorch if you encounter this graph break often and it is causing performance issues."
|
|
]
|
|
}
|
|
],
|
|
"GB0252": [
|
|
{
|
|
"Gb_type": "could not find name in object's mro",
|
|
"Context": "name={name}, object type={type(self.value)}, mro={type(self.value).__mro__}",
|
|
"Explanation": "Could not find name `{name}` in mro {type(self.value).__mro__}",
|
|
"Hints": [
|
|
"Ensure the name `{name}` is defined somewhere in {self.value}'s type hierarchy.",
|
|
"Dynamo has detected that tracing the code will result in an error when running in eager. Please double check that your code doesn't contain a similar error when actually running eager/uncompiled."
|
|
]
|
|
}
|
|
],
|
|
"GB0253": [
|
|
{
|
|
"Gb_type": "call_method on generator",
|
|
"Context": "object={self.value}, method={name}, args={args}, kwargs={kwargs}",
|
|
"Explanation": "Detected a method call to a user-defined generator object. This is not fully supported.",
|
|
"Hints": [
|
|
"Set `torch._dynamo.config.enable_faithful_generator_behavior = False`. Note that this ",
|
|
"may cause silent incorrectness, since we will eagerly unpack generators instead of lazily ",
|
|
"evaluating them."
|
|
]
|
|
}
|
|
],
|
|
"GB0254": [
|
|
{
|
|
"Gb_type": "non-const setattr name on user-defined object",
|
|
"Context": "object={self}, name={name}, value={value}",
|
|
"Explanation": "Detected a call to `setattr` of a user-defined object with a non-constant name.",
|
|
"Hints": [
|
|
"Ensure that the name is a string."
|
|
]
|
|
}
|
|
],
|
|
"GB0255": [
|
|
{
|
|
"Gb_type": "attempted to call sourceless user-defined object as a method",
|
|
"Context": "object={self.value}, function={func}, args={args}, kwargs={kwargs}",
|
|
"Explanation": "Dynamo does not support this.",
|
|
"Hints": [
|
|
"Ensure the user-defined object {self.value} is constructed outside the compiled region."
|
|
]
|
|
}
|
|
],
|
|
"GB0256": [
|
|
{
|
|
"Gb_type": "User-defined object with non-function __getattr__",
|
|
"Context": "object={self.value}, name={name}, getattr_fn={getattr_fn}",
|
|
"Explanation": "Found a non-function __getattr__ {getattr_fn} from a user-defined object {self.value} when attempting to getattr `{name}`",
|
|
"Hints": [
|
|
"Ensure the object's __getattr__ is a function type."
|
|
]
|
|
}
|
|
],
|
|
"GB0257": [
|
|
{
|
|
"Gb_type": "TypedDict with optional keys",
|
|
"Context": "str(self.value)",
|
|
"Explanation": "Dyanmo does not support tracing TypedDict with optional keys",
|
|
"Hints": [
|
|
"Avoid using TypedDict with optional keys",
|
|
"It may be possible to write Dynamo tracing rules for this code. Please report an issue to PyTorch if you encounter this graph break often and it is causing performance issues."
|
|
]
|
|
}
|
|
],
|
|
"GB0258": [
|
|
{
|
|
"Gb_type": "collections.deque() with bad arguments",
|
|
"Context": "args={args}, kwargs={kwargs}",
|
|
"Explanation": "Detected call to collections.deque() with bad arguments.",
|
|
"Hints": [
|
|
"Fix the call to collections.deque().",
|
|
"Dynamo has detected that tracing the code will result in an error when running in eager. Please double check that your code doesn't contain a similar error when actually running eager/uncompiled."
|
|
]
|
|
}
|
|
],
|
|
"GB0259": [
|
|
{
|
|
"Gb_type": "collections.deque() with bad iterable argument",
|
|
"Context": "args={args}, kwargs={kwargs}",
|
|
"Explanation": "Call to collections.deque() has an iterable argument that Dynamo cannot convert to a list.",
|
|
"Hints": [
|
|
"Use a simpler sequence type that Dynamo can convert to a list ",
|
|
"(e.g. list, tuple, list iterator, etc.)",
|
|
"Dynamo has detected that tracing the code will result in an error when running in eager. Please double check that your code doesn't contain a similar error when actually running eager/uncompiled."
|
|
]
|
|
}
|
|
],
|
|
"GB0260": [
|
|
{
|
|
"Gb_type": "missing args to functools.partial",
|
|
"Context": "",
|
|
"Explanation": "functools.partial requires at least one argument",
|
|
"Hints": [
|
|
"Fix the functools.partial call.",
|
|
"Dynamo has detected that tracing the code will result in an error when running in eager. Please double check that your code doesn't contain a similar error when actually running eager/uncompiled."
|
|
]
|
|
}
|
|
],
|
|
"GB0261": [
|
|
{
|
|
"Gb_type": "User-defined object method with non-function __func__",
|
|
"Context": "object={self.value}, name={name}, method={dynamic_subobj}, method.__self__={dynamic_subobj.__self__}, method.__func__={dynamic_subobj.__func__}",
|
|
"Explanation": "Method {dynamic_subobj} (name={name}) of user-defined object {self.value} has a __func__ ({dynamic_subobj.__func__}) that is not a function type.",
|
|
"Hints": [
|
|
"Ensure that the method's __func__ is a function type."
|
|
]
|
|
}
|
|
],
|
|
"GB0262": [
|
|
{
|
|
"Gb_type": "unsupported contextlib.* API",
|
|
"Context": "{self.value}",
|
|
"Explanation": "{self.value} not supported. This may be due to its use of context-specific operations that are not supported in Dynamo yet (i.e. Exception handling)",
|
|
"Hints": [
|
|
"It may be possible to write Dynamo tracing rules for this code. Please report an issue to PyTorch if you encounter this graph break often and it is causing performance issues."
|
|
]
|
|
}
|
|
],
|
|
"GB0263": [
|
|
{
|
|
"Gb_type": "attempted to trace contextlib.contextmanager",
|
|
"Context": "args={args}",
|
|
"Explanation": "Tracing contextlib.contextmanager is disabled.",
|
|
"Hints": [
|
|
"Set torch._dynamo.config.enable_trace_contextlib = True"
|
|
]
|
|
}
|
|
],
|
|
"GB0264": [
|
|
{
|
|
"Gb_type": "Attempted to use `torch.nn.Parameter()` constructor with Dynamo",
|
|
"Context": "",
|
|
"Explanation": "Dynamo does not support this",
|
|
"Hints": [
|
|
"Try to construct `torch.nn.Parameter()` outside the compiled region.",
|
|
"If this is not possible, turn `graph_break_on_nn_param_ctor` off",
|
|
"It may be possible to write Dynamo tracing rules for this code. Please report an issue to PyTorch if you encounter this graph break often and it is causing performance issues."
|
|
]
|
|
}
|
|
],
|
|
"GB0265": [
|
|
{
|
|
"Gb_type": "FakeScriptObject missing method implementation",
|
|
"Context": "value={self.value}, method={name}",
|
|
"Explanation": "TorchScript object {self.value} doesn't define the method {name}.",
|
|
"Hints": [
|
|
"Ensure the method {name} is implemented in {self.value}.",
|
|
"Dynamo has detected that tracing the code will result in an error when running in eager. Please double check that your code doesn't contain a similar error when actually running eager/uncompiled."
|
|
]
|
|
}
|
|
],
|
|
"GB0266": [
|
|
{
|
|
"Gb_type": "Weird method call on TorchScript object",
|
|
"Context": "value={self.value}, method={name}",
|
|
"Explanation": "This particular method call ({name}) is not supported (e.g. calling `__setattr__`). Most method calls to TorchScript objects should be supported.",
|
|
"Hints": [
|
|
"Avoid calling this method."
|
|
]
|
|
}
|
|
],
|
|
"GB0267": [
|
|
{
|
|
"Gb_type": "Attempted to access non-callable attribute of TorchScript object",
|
|
"Context": "value={self.value}, method={name}",
|
|
"Explanation": "Attribute accesses of TorchScript objects to non-callable attributes are not supported.",
|
|
"Hints": [
|
|
"Use method calls instead of attribute access."
|
|
]
|
|
}
|
|
],
|
|
"GB0268": [
|
|
{
|
|
"Gb_type": "Unsupported kwargs for itertools.product",
|
|
"Context": "call_function {self} {args} {kwargs}",
|
|
"Explanation": "Expected kwargs: 'repeat', but got {','.join(set(kwargs.keys()) - {'repeat'})}",
|
|
"Hints": [
|
|
"Dynamo has detected that tracing the code will result in an error when running in eager. Please double check that your code doesn't contain a similar error when actually running eager/uncompiled."
|
|
]
|
|
}
|
|
],
|
|
"GB0269": [
|
|
{
|
|
"Gb_type": "Forced graph break on leaf function",
|
|
"Context": "",
|
|
"Explanation": "Forced graph break for nested graph break testing purposes",
|
|
"Hints": [
|
|
"Set torch._dynamo.config.debug_force_graph_break_on_leaf_return = False"
|
|
]
|
|
}
|
|
],
|
|
"GB0270": [
|
|
{
|
|
"Gb_type": "unimplemented builtin op vars() with no arguments",
|
|
"Context": "vars: {self} {args}",
|
|
"Explanation": "Dynamo does not know how to trace builtin operator {self.fn} with no arguments",
|
|
"Hints": [
|
|
"It may be possible to write Dynamo tracing rules for this code. Please report an issue to PyTorch if you encounter this graph break often and it is causing performance issues."
|
|
]
|
|
}
|
|
],
|
|
"GB0271": [
|
|
{
|
|
"Gb_type": "Class attribute mutation when the __dict__ was already materialized",
|
|
"Context": "str(self.value)",
|
|
"Explanation": "Dyanmo does not support tracing mutations on a class when its __dict__ is materialized",
|
|
"Hints": []
|
|
}
|
|
],
|
|
"GB0272": [
|
|
{
|
|
"Gb_type": "Failed to make weakref to User Object when storing by ID",
|
|
"Context": "user_objected: {obj}",
|
|
"Explanation": "Object does not allow us to make a weakref to it",
|
|
"Hints": []
|
|
},
|
|
{
|
|
"Gb_type": "Failed to make weakref to User Object",
|
|
"Context": "user_objected: {obj}",
|
|
"Explanation": "Object does not allow us to make a weakref to it",
|
|
"Hints": []
|
|
}
|
|
],
|
|
"GB0273": [
|
|
{
|
|
"Gb_type": "Keyword args passed to exception constructor",
|
|
"Context": "{self} with kwargs {init_kwargs}",
|
|
"Explanation": "Dynamo does not know how to handle keyword args passed to an exception constructor",
|
|
"Hints": [
|
|
"It may be possible to write Dynamo tracing rules for this code. Please report an issue to PyTorch if you encounter this graph break often and it is causing performance issues."
|
|
]
|
|
}
|
|
],
|
|
"GB0274": [
|
|
{
|
|
"Gb_type": "Attempted to reconstruct context manager's __enter__ method",
|
|
"Context": "str(self.ctx)",
|
|
"Explanation": "Attempted to reconstruct context manager {type_str} while tracing `with ...:`",
|
|
"Hints": [
|
|
"It is likely there is a graph break while tracing `with ctx:` ",
|
|
"but outside the actual `ctx.__enter__()` method. ",
|
|
"`torch.compile` does not expect this to happen.",
|
|
"This is likely to be a Dynamo bug. Please report an issue to PyTorch."
|
|
]
|
|
}
|
|
],
|
|
"GB0275": [
|
|
{
|
|
"Gb_type": "torch._dynamo.step_unsupported() with empty checkpoint",
|
|
"Context": "",
|
|
"Explanation": "traced torch._dynamo.step_unsupported(), but there is no checkpoint to step_graph_break from. This graph break is used for debugging only.",
|
|
"Hints": [
|
|
"Remove the torch._dynamo.step_unsupported() call.",
|
|
"Include at least one checkpoint: (1) include at least 2 ops and (2) make sure there is some ",
|
|
"line of code that is not in a try/with block, and has an empty Python stack.",
|
|
"This is likely to be a Dynamo bug. Please report an issue to PyTorch."
|
|
]
|
|
}
|
|
],
|
|
"GB0276": [
|
|
{
|
|
"Gb_type": "Failed to make weakref to User Object",
|
|
"Context": "user_object: {value}",
|
|
"Explanation": "Object does not allow us to make a weakref to it",
|
|
"Hints": []
|
|
}
|
|
],
|
|
"GB0277": [
|
|
{
|
|
"Gb_type": "Attempted to wrap sparse Tensor with VariableTracker",
|
|
"Context": "str(example_value)",
|
|
"Explanation": "torch.compile does not support sparse Tensors with VariableTracker",
|
|
"Hints": [
|
|
"It may be possible to write Dynamo tracing rules for this code. Please report an issue to PyTorch if you encounter this graph break often and it is causing performance issues."
|
|
]
|
|
}
|
|
],
|
|
"GB0278": [
|
|
{
|
|
"Gb_type": "Unsupported dict type for fromkeys()",
|
|
"Context": "{user_cls.__name__}.fromkeys(): {args} {kwargs}",
|
|
"Explanation": "Failed to call {user_cls.__name__}.fromkeys() because {user_cls.__name__} is not any type of dict, OrderedDict, or defaultdict",
|
|
"Hints": [
|
|
"Ensure {user_cls.__name__} is a type of dict, OrderedDict, or defaultdict."
|
|
]
|
|
}
|
|
]
|
|
}
|