mirror of
https://github.com/deepspeedai/DeepSpeed.git
synced 2025-10-20 15:33:51 +08:00
Add defence for DeepCompile w/o optimizer (#7225)
Similar to #7211
When the optimizer is not specified, the optimizer will be of type
`DeepSpeedZeRoOffload` instead of `DeepSpeedZeroOptimizer_Stage3` (e.g.
for ZeRO-3 pure inference), and `DeepSpeedZeRoOffload` doesn't have a
`parameter_offload` attribute.
56005d2b25/deepspeed/runtime/engine.py (L1684-L1707)
```log
File "deepspeed/runtime/engine.py", line 3919, in compile
backend = init_z3(self, backend, compile_config, compile_kwargs, schedule)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "deepspeed/compile/init_z3.py", line 36, in init_z3
optimizer.parameter_offload._remove_module_hooks()
^^^^^^^^^^^^^^^^^^^^^^^^^^^
AttributeError: 'DeepSpeedZeRoOffload' object has no attribute 'parameter_offload'
```
---------
Signed-off-by: Hollow Man <hollowman@opensuse.org>
Signed-off-by: Logan Adams <loadams@microsoft.com>
Co-authored-by: Masahiro Tanaka <81312776+tohtana@users.noreply.github.com>
Co-authored-by: Logan Adams <loadams@microsoft.com>
This commit is contained in:
@@ -3896,6 +3896,9 @@ class DeepSpeedEngine(Module):
             or self.zero_optimization_stage() == ZeroStageEnum.weights \
             , "Currently DeepCompile supports stage 1 or 3 only."

+        assert not isinstance(self.optimizer,
+                              DeepSpeedZeRoOffload), "Currently DeepCompile is not supported without an optimizer."

         if schedule is not None:

         def passes_name_to_fn(passes):
Reference in New Issue
Block a user