@@ -66,7 +66,7 @@ def display(img):
######################################################################
# ``crop`` is not handled effectively out-of-the-box by
# ``torch.compile``: ``torch.compile`` induces a
- # `"graph break" <https://pytorch.org/docs/stable/torch.compiler_faq.html#graph-breaks>`_
+ # `"graph break" <https://pytorch.org/docs/stable/torch.compiler_faq.html#graph-breaks>`_
# on functions it is unable to handle and graph breaks are bad for performance.
# The following code demonstrates this by raising an error
# (``torch.compile`` with ``fullgraph=True`` raises an error if a
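For reference, a minimal sketch of this failure mode, assuming a PIL round trip like the tutorial's ``crop``; ``crop_via_pil`` and the box coordinates are hypothetical stand-ins:

import torch
import torchvision.transforms.functional as TF

def crop_via_pil(img, box):
    # The PIL round trip is opaque to torch.compile's tracer, so Dynamo
    # must graph-break at this call.
    pil_img = TF.to_pil_image(img.cpu())
    cropped = pil_img.crop(box)  # box = (left, upper, right, lower)
    return TF.pil_to_tensor(cropped).to(img.device) / 255.0

@torch.compile(fullgraph=True)
def f(img):
    return crop_via_pil(img, (10, 10, 50, 50))

# f(torch.rand(3, 64, 64))  # raises: fullgraph=True turns the graph break into an error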
@@ -85,9 +85,9 @@ def f(img):
#
# 1. wrap the function into a PyTorch custom operator.
# 2. add a "``FakeTensor`` kernel" (aka "meta kernel") to the operator.
- # Given the metadata (e.g. shapes)
- # of the input Tensors, this function says how to compute the metadata
- # of the output Tensor(s).
+ # Given some ``FakeTensor`` inputs (dummy Tensors that don't have storage),
+ # this function should return dummy Tensors of your choice with the correct
+ # Tensor metadata (shape/strides/``dtype``/device).


from typing import Sequence
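Concretely, the two steps look roughly like the following sketch (hypothetical ``mylib::crop`` namespace; the box convention follows ``PIL.Image.crop``):

import torch
from torch import Tensor
from typing import Sequence
import torchvision.transforms.functional as TF

# Step 1: wrap the function into a custom operator. torch.compile treats the
# operator as opaque, so the PIL round trip inside no longer breaks the graph.
@torch.library.custom_op("mylib::crop", mutates_args=())
def crop(pic: Tensor, box: Sequence[int]) -> Tensor:
    img = TF.to_pil_image(pic.cpu())
    cropped = img.crop(box)
    return TF.pil_to_tensor(cropped).to(pic.device) / 255.0

# Step 2: the FakeTensor kernel. Given FakeTensor inputs, return a dummy
# Tensor with the correct output metadata; new_empty on a FakeTensor
# allocates no real storage.
@torch.library.register_fake("mylib::crop")
def _(pic, box):
    channels = pic.shape[0]
    x0, y0, x1, y1 = box
    return pic.new_empty(channels, y1 - y0, x1 - x0)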
@@ -130,6 +130,11 @@ def f(img):
# ``autograd.Function`` with PyTorch operator registration APIs can lead to (and
# has led to) silent incorrectness when composed with ``torch.compile``.
#
+ # If you don't need training support, there is no need to use
+ # ``torch.library.register_autograd``.
+ # If you end up training with a ``custom_op`` that doesn't have an autograd
+ # registration, we'll raise an error.
+ #
# The gradient formula for ``crop`` is essentially ``PIL.paste`` (we'll leave the
# derivation as an exercise to the reader). Let's first wrap ``paste`` into a
# custom operator:
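The wrapper itself sits outside this hunk; a sketch of what it might look like, assuming a hypothetical ``mylib::paste`` that mirrors the ``crop`` wrapper above:

import torch
from torch import Tensor
from typing import Sequence
import torchvision.transforms.functional as TF

@torch.library.custom_op("mylib::paste", mutates_args=())
def paste(im1: Tensor, im2: Tensor, coord: Sequence[int]) -> Tensor:
    assert im1.device == im2.device and im1.dtype == im2.dtype
    im1_pil = TF.to_pil_image(im1.cpu())
    im2_pil = TF.to_pil_image(im2.cpu())
    im1_pil.paste(im2_pil, coord)  # coord = (left, upper) in PIL convention
    return TF.pil_to_tensor(im1_pil).to(im1.device) / 255.0

@torch.library.register_fake("mylib::paste")
def _(im1, im2, coord):
    # paste returns an image with the destination's metadata.
    return torch.empty_like(im1)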
@@ -203,7 +208,7 @@ def setup_context(ctx, inputs, output):
######################################################################
# Mutable Python Custom operators
# -------------------------------
- # You can also wrap a Python function that mutates its inputs into a custom
+ # You can also wrap a Python function that mutates its inputs into a custom
# operator.
# Functions that mutate inputs are common because that is how many low-level
# kernels are written; for example, a kernel that computes ``sin`` may take in
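A sketch of the out-variant pattern being introduced here (a hypothetical ``mylib::numpy_sin``; ``mutates_args`` must name every input the op writes to):

import numpy as np
import torch
from torch import Tensor

@torch.library.custom_op("mylib::numpy_sin", mutates_args={"output"})
def numpy_sin(input: Tensor, output: Tensor) -> None:
    # Mutating ops return None; the result is written into `output` in place.
    np.sin(input.numpy(), out=output.numpy())

x = torch.randn(3)
out = torch.empty(3)
numpy_sin(x, out)
assert torch.allclose(out, torch.sin(x))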