mirror of
https://github.com/huggingface/transformers.git
synced 2025-10-20 17:13:56 +08:00
Fix typo in Language Modeling example scripts and update TPU type (#38652)
* Fix typo that prevents the examples from being run correctly * Return DistributedType.TPU in the accelerator.distributed_type comparison
This commit is contained in:
committed by
GitHub
parent
8ff22e9d3b
commit
cb4c56ce0d
@ -521,7 +521,7 @@ def main():
|
||||
|
||||
# Get the factor by which the embedding layer should be padded based on the device
|
||||
pad_factor = 1
|
||||
if torch.cuda.is_availble():
|
||||
if torch.cuda.is_available():
|
||||
pad_factor = 8
|
||||
|
||||
elif is_torch_xla_available(check_is_tpu=True):
|
||||
|
@ -488,7 +488,7 @@ def main():
|
||||
|
||||
# Get the factor by which the embedding layer should be padded based on the device
|
||||
pad_factor = 1
|
||||
if torch.cuda.is_availble():
|
||||
if torch.cuda.is_available():
|
||||
pad_factor = 8
|
||||
|
||||
elif is_torch_xla_available(check_is_tpu=True):
|
||||
|
Reference in New Issue
Block a user