Rewrite existing links to custom ops gdocs with the landing page (#127423)
NB: these links will be live after the docs build happens, which is once a day.

Test Plan:
- existing tests

Pull Request resolved: https://github.com/pytorch/pytorch/pull/127423
Approved by: https://github.com/jansel, https://github.com/williamwen42
ghstack dependencies: #127291, #127292, #127400

@@ -16,8 +16,8 @@ static void metaFallback(
       "fake impl or Meta kernel registered. You may have run into this message "
       "while using an operator with PT2 compilation APIs (torch.compile/torch.export); "
       "in order to use this operator with those APIs you'll need to add a fake impl. "
-      "Please see the following doc for next steps: "
-      "https://docs.google.com/document/d/1_W62p8WJOQQUzPsJYa7s701JXt0qf2OfLub2sbkHOaU/edit");
+      "Please see the following for next steps: "
+      "https://pytorch.org/docs/main/notes/custom_operators.html");
 }
 
 TORCH_LIBRARY_IMPL(_, Meta, m) {
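
For context, "add a fake impl" here means registering a metadata-only kernel for the custom op so the PT2 stack can trace through it instead of hitting this Meta fallback error. A minimal sketch, assuming PyTorch 2.4+ with the torch.library.custom_op / torch.library.register_fake APIs; the op name "mylib::mul_scalar" and its body are hypothetical:

import torch
from torch.library import custom_op, register_fake

@custom_op("mylib::mul_scalar", mutates_args=())
def mul_scalar(x: torch.Tensor, scale: float) -> torch.Tensor:
    # Real kernel: runs on actual data in eager mode.
    return x * scale

@register_fake("mylib::mul_scalar")
def _(x, scale):
    # Fake impl (Meta kernel): only describes output metadata
    # (shape/dtype/device); it must not read the input data.
    return torch.empty_like(x)

# With the fake impl registered, the op works under torch.compile
# and torch.export instead of raising the Meta fallback error.
compiled = torch.compile(lambda x: mul_scalar(x, 2.0))
print(compiled(torch.randn(3)))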

@@ -18,7 +18,7 @@ void throwNullDataPtrError() {
       "If you're using torch.compile/export/fx, it is likely that we are erroneously "
       "tracing into a custom kernel. To fix this, please wrap the custom kernel into "
       "an opaque custom op. Please see the following for details: "
-      "https://docs.google.com/document/d/1W--T6wz8IY8fOI0Vm8BF44PdBgs283QvpelJZWieQWQ");
+      "https://pytorch.org/docs/main/notes/custom_operators.html");
 }
 
 // NOTE: [FakeTensor.data_ptr deprecation]

@@ -1580,7 +1580,7 @@ struct C10_API TensorImpl : public c10::intrusive_ptr_target {
       "If you're using torch.compile/export/fx, it is likely that we are erroneously "
       "tracing into a custom kernel. To fix this, please wrap the custom kernel into "
       "an opaque custom op. Please see the following for details: "
-      "https://docs.google.com/document/d/1W--T6wz8IY8fOI0Vm8BF44PdBgs283QvpelJZWieQWQ\n"
+      "https://pytorch.org/docs/main/notes/custom_operators.html\n"
       "If you're using Caffe2, Caffe2 uses a lazy allocation, so you will need to call "
       "mutable_data() or raw_mutable_data() to actually allocate memory.");
   // Caller does the type check.
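
"Wrap the custom kernel into an opaque custom op" means giving the kernel an operator registration so the compiler treats it as a black box rather than tracing into code that needs a real data pointer. A minimal sketch, assuming PyTorch 2.4+; the function numpy_sin and the op name "mylib::numpy_sin" are hypothetical:

import numpy as np
import torch
from torch.library import custom_op, register_fake

@custom_op("mylib::numpy_sin", mutates_args=())
def numpy_sin(x: torch.Tensor) -> torch.Tensor:
    # This body needs a real data pointer (round-trip through NumPy),
    # so it must stay opaque to the tracer.
    return torch.from_numpy(np.sin(x.detach().cpu().numpy())).to(x.device)

@register_fake("mylib::numpy_sin")
def _(x):
    # Metadata-only description for tracing; no data access.
    return torch.empty_like(x)

def f(x):
    return numpy_sin(x)

# The compiled function calls the op as a unit; it never traces into
# the NumPy code, so no FakeTensor.data_ptr error is triggered.
out = torch.compile(f)(torch.randn(4))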

@@ -1676,7 +1676,7 @@ err_epilogue = (
     "(and fall back to eager-mode PyTorch) on all ops "
     "that have do not have the 'pt2_compliant_tag'. "
     "Please see the following doc for how to mark this op as PT2 compliant "
-    "https://docs.google.com/document/d/1W--T6wz8IY8fOI0Vm8BF44PdBgs283QvpelJZWieQWQ"
+    "https://pytorch.org/docs/main/notes/custom_operators.html"
 )
 
 
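Marking an op as PT2 compliant amounts to attaching torch.Tag.pt2_compliant_tag to its definition, which tells Dynamo it is safe to trace through rather than graph-break on. A minimal sketch, assuming a recent PyTorch (2.4+) where torch.library.define accepts a tags argument and register_fake is available; "mylib::clamp_min" is a hypothetical op:

import torch
from torch.library import define, impl, register_fake

define(
    "mylib::clamp_min",
    "(Tensor x, float lo) -> Tensor",
    tags=(torch.Tag.pt2_compliant_tag,),
)

@impl("mylib::clamp_min", "CompositeExplicitAutograd")
def clamp_min_impl(x, lo):
    # Eager implementation shared across backends.
    return torch.clamp(x, min=lo)

@register_fake("mylib::clamp_min")
def clamp_min_fake(x, lo):
    # Fake impl so the op also works under torch.compile/export.
    return torch.empty_like(x)

y = torch.ops.mylib.clamp_min(torch.randn(3), 0.0)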

@@ -556,7 +556,7 @@ def register_fake(
     This API may be used as a decorator (see examples).
 
     For a detailed guide on custom ops, please see
-    https://docs.google.com/document/d/1W--T6wz8IY8fOI0Vm8BF44PdBgs283QvpelJZWieQWQ/edit
+    https://pytorch.org/docs/main/notes/custom_operators.html
 
     Examples:
         >>> import torch
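
The decorator usage mentioned in this docstring looks roughly like the following. A minimal sketch, assuming PyTorch 2.4+; "mylib::concat_pair" is a hypothetical op, and torch.library.opcheck is used here only to sanity-check the real and fake impls against each other:

import torch
from torch.library import custom_op, register_fake

@custom_op("mylib::concat_pair", mutates_args=())
def concat_pair(x: torch.Tensor, y: torch.Tensor) -> torch.Tensor:
    return torch.cat([x, y], dim=0)

@register_fake("mylib::concat_pair")
def _(x, y):
    # The fake impl computes output metadata from input metadata only;
    # it must never read the tensors' data.
    return x.new_empty((x.shape[0] + y.shape[0], *x.shape[1:]))

torch.library.opcheck(concat_pair, (torch.randn(2, 3), torch.randn(4, 3)))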