Fix typo under torch/csrc/jit/runtime directory (#97243)
This PR fixes typos in comments and messages under the `torch/csrc/jit/runtime` directory.

Pull Request resolved: https://github.com/pytorch/pytorch/pull/97243
Approved by: https://github.com/davidberard98
Committed by: PyTorch MergeBot
Parent: 1f71ac785c
Commit: d70f9c7888
@@ -1251,7 +1251,7 @@ void BlockRunner::Deallocator::cleanupImpl() {
     block_runner_.planner_->deallocate();
   } else {
     // This is the first run, and it didn't finish, so we can't use a
-    // `MemoryPlanner` to deallocate stuff. Just reset everything mannually.
+    // `MemoryPlanner` to deallocate stuff. Just reset everything manually.
     block_runner_.resetMemory();
   }
   // clean up owning refs of input tensors
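For context, the branch touched above implements a simple fallback: once a run has completed, a memory planner exists and knows what it needs to free; if the very first run aborts before the planner is created, per-run state is reset by hand instead. Below is a minimal, self-contained sketch of that pattern; `Planner`, `Runner`, and their members are simplified stand-ins, not the actual static runtime classes.

#include <memory>
#include <vector>

// Hypothetical stand-in for a planner that tracks the memory it manages.
struct Planner {
  std::vector<char> managed_bytes;
  void deallocate() { managed_bytes.clear(); }  // free everything it planned
};

// Hypothetical stand-in for a runner that owns per-run state.
struct Runner {
  std::unique_ptr<Planner> planner;
  std::vector<int> outputs;

  // Fallback cleanup when no planner exists yet (first run didn't finish).
  void resetMemory() { outputs.clear(); }

  void cleanup() {
    if (planner) {
      // A completed run built the planner, so it knows what to deallocate.
      planner->deallocate();
    } else {
      // First run didn't finish: no planner yet, reset everything manually.
      resetMemory();
    }
  }
};

int main() {
  Runner r;
  r.cleanup();                             // takes the manual-reset branch
  r.planner = std::make_unique<Planner>();
  r.cleanup();                             // takes the planner branch
  return 0;
}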
@@ -1712,7 +1712,7 @@ BlockRunner::IndividualMetrics BlockRunner::benchmark_individual_ops(
   results.setup_time = timer.MilliSeconds();
 
   // The first iteration profiles each node's output Tensors' sizes and
-  // initializes the memory planner with the profile information. Folllowing
+  // initializes the memory planner with the profile information. Following
   // iterations just use the already established memory planning.
   timer.Start();
   operator()(args_list[0], is_kwargs_empty ? empty_kwargs : kwargs_list[0]);
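The comment in this hunk describes a profile-then-reuse benchmarking flow: iteration 0 records each node's output sizes and uses them to initialize the memory plan, and every later iteration simply replays under that established plan. A rough sketch of that flow follows, using invented names (`MemoryPlan`, `profile_run`, `planned_run`) rather than the real benchmark API.

#include <chrono>
#include <cstdio>
#include <optional>
#include <vector>

// Hypothetical record of the output sizes observed during the profiling run.
struct MemoryPlan {
  std::vector<std::size_t> output_sizes;
};

std::optional<MemoryPlan> g_plan;

// Iteration 0: run once and record output sizes to establish the plan.
void profile_run(const std::vector<std::size_t>& observed_sizes) {
  g_plan = MemoryPlan{observed_sizes};
}

// Iterations 1..N: reuse the established plan (e.g. pre-sized buffers).
void planned_run() {
  // A real runtime would reuse g_plan->output_sizes for allocation here.
}

int main() {
  const auto start = std::chrono::steady_clock::now();
  profile_run({64, 128, 256});   // first iteration establishes the plan
  for (int i = 1; i < 10; ++i) {
    planned_run();               // later iterations just use the plan
  }
  const double ms = std::chrono::duration<double, std::milli>(
                        std::chrono::steady_clock::now() - start)
                        .count();
  std::printf("10 iterations took %.3f ms\n", ms);
  return 0;
}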