Compare commits

...

5 Commits

Author SHA1 Message Date
a5b8811bfa Release: v0.6.2 2022-03-31 09:24:08 -04:00
54a685a92b Leave default as None (#300) 2022-03-31 09:23:25 -04:00
8bc6c83175 Patch release: v0.6.1 2022-03-18 17:41:43 -04:00
211e6555fa Fix breaking change 2022-03-18 17:40:32 -04:00
a5b782b0a1 v0.7.0.dev0 2022-03-18 09:40:22 -04:00
5 changed files with 11 additions and 6 deletions

setup.py

@@ -30,7 +30,7 @@ extras["sagemaker"] = [
 setup(
     name="accelerate",
-    version="0.6.0",
+    version="0.6.2",
     description="Accelerate",
     long_description=open("README.md", "r", encoding="utf-8").read(),
     long_description_content_type="text/markdown",
@@ -78,8 +78,9 @@ setup(
 # twine upload dist/* -r pypitest --repository-url=https://test.pypi.org/legacy/
 # 6. Check that you can install it in a virtualenv by running:
 # pip install -i https://testpypi.python.org/pypi accelerate
+# accelerate env
+# accelerate test
 # 7. Upload the final version to actual pypi:
 # twine upload dist/* -r pypi
 # 8. Add release notes to the tag in github once everything is looking hunky-dory.
-# 9. Add the release version to docs/source/_static/js/custom.js and .github/deploy_doc.sh
-# 10. Update the version in __init__.py, setup.py to the new version "-dev" and push to master
+# 9. Update the version in __init__.py, setup.py to the new version "-dev" and push to master

src/accelerate/__init__.py

@@ -2,7 +2,7 @@
 # There's no way to ignore "F401 '...' imported but unused" warnings in this
 # module, but to preserve other warnings. So, don't check this module at all.
-__version__ = "0.6.0"
+__version__ = "0.6.2"
 from .accelerator import Accelerator
 from .kwargs_handlers import DistributedDataParallelKwargs, GradScalerKwargs, InitProcessGroupKwargs

src/accelerate/accelerator.py

@@ -478,7 +478,7 @@ class Accelerator:
                 The optimizer(s) for which to unscale gradients. If not set, will unscale gradients on all optimizers
                 that were passed to [`~Accelerator.prepare`].
         """
-        if self.state.use_fp16 and self.native_amp:
+        if self.use_fp16 and self.native_amp:
             if optimizer is None:
                 # TODO: this unscales all optimizers where we should only unscale the one where parameters are.
                 optimizer = self._optimizers
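
The changed guard above keys gradient unscaling off the Accelerator's own use_fp16 rather than the state object. Below is a minimal, self-contained sketch of that guard; TinyAccelerator and its attributes are illustrative stand-ins under assumed names, not the real Accelerator API. Unscaling should only run when fp16-style mixed precision is active and native AMP supplied the gradient scaler.

class TinyAccelerator:
    def __init__(self, mixed_precision="no", native_amp=False):
        self.mixed_precision = mixed_precision
        self.native_amp = native_amp

    @property
    def use_fp16(self):
        # Mirrors the backward-compatibility property added in state.py below.
        return self.mixed_precision != "no"

    def unscale_gradients(self):
        # The real library would call GradScaler.unscale_ on each optimizer here.
        if self.use_fp16 and self.native_amp:
            return "unscaled"
        return "skipped"

print(TinyAccelerator("fp16", native_amp=True).unscale_gradients())  # unscaled
print(TinyAccelerator("no").unscale_gradients())                      # skipped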

src/accelerate/commands/launch.py

@@ -56,7 +56,6 @@ def launch_command_parser(subparsers=None):
     )
     parser.add_argument(
         "--mixed_precision",
-        default="no",
         type=str,
         choices=["no", "fp16", "bf16"],
         help="Whether or not to use mixed precision training. "

src/accelerate/state.py

@@ -268,3 +268,8 @@ class AcceleratorState:
         if self.distributed_type == DistributedType.DEEPSPEED:
             repr += f"ds_config: {self.deepspeed_plugin.deepspeed_config}\n"
         return repr
+
+    # For backward compatibility
+    @property
+    def use_fp16(self):
+        return self.mixed_precision != "no"
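
The new property restores the old use_fp16 attribute as a thin shim over mixed_precision, so existing callers keep working. Here is a runnable sketch of the same pattern using a deliberately simplified stand-in class; the real AcceleratorState tracks much more state.

class StateShim:
    def __init__(self, mixed_precision="no"):
        self.mixed_precision = mixed_precision  # "no", "fp16" or "bf16"

    # For backward compatibility: older code checked `state.use_fp16`.
    @property
    def use_fp16(self):
        return self.mixed_precision != "no"

print(StateShim("bf16").use_fp16)  # True: any non-"no" setting reports as fp16-style
print(StateShim().use_fp16)        # False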