"tests/pipelines/test_pipelines_flax.py" did not exist on "af279434d03e6e3be7808ecd15c652338b31024b"
Commit a7d6cb2d authored by Benjamin Fattori

record _rank/_world_size for single GPU

parent eddd627a
@@ -44,6 +44,8 @@ class HFLM(LM):
                 if torch.cuda.is_available()
                 else torch.device("cpu")
             )
+            self._rank = 0
+            self._world_size = 1
         else:
             self._device = 'cpu'
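For context, a minimal sketch of what the single-GPU branch of the HFLM constructor looks like with this change. The class name HFLMSketch, the device argument, and the surrounding structure are assumptions for illustration, not code taken verbatim from the repository; only the device selection and the newly recorded self._rank / self._world_size attributes mirror the diff above. The point of the commit is that a single-GPU (non-distributed) run now exposes the same rank/world-size attributes that downstream code expects from distributed runs.

# Hedged sketch: a simplified HFLM-like initializer (class and argument
# names are hypothetical). Tokenizer/model loading and the LM base class
# are omitted; only the bookkeeping touched by this commit is shown.
import torch


class HFLMSketch:
    def __init__(self, device: str = "cuda"):
        if device:
            # Use the requested device when CUDA is available, else fall
            # back to CPU (mirrors the context lines of the diff).
            self._device = (
                torch.device(device)
                if torch.cuda.is_available()
                else torch.device("cpu")
            )
            # New in this commit: record rank/world size explicitly so a
            # single-GPU run looks like a world of size 1 at rank 0.
            self._rank = 0
            self._world_size = 1
        else:
            self._device = "cpu"


if __name__ == "__main__":
    lm = HFLMSketch(device="cuda")
    print(lm._device, lm._rank, lm._world_size)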