I have three workers, and I don't have an optimizer configured — just the default. It throws the error below after about five minutes of running. Since version 2.43.0 there have been lots of changes in the code, so I may have missed a few tips or tricks. Can anyone help?
Exception in thread _LearnerThread:
Traceback (most recent call last):
File “/lib/python3.12/threading.py”, line 1075, in _bootstrap_inner
self.run()
File “/lib/python3.12/site-packages/ray/rllib/algorithms/impala/impala_learner.py”, line 293, in run
self.step()
File “/lib/python3.12/site-packages/ray/rllib/algorithms/impala/impala_learner.py”, line 321, in step
self._update_method(
File “/lib/python3.12/site-packages/ray/util/tracing/tracing_helper.py”, line 463, in _resume_span
return method(self, *_args, **_kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File “/lib/python3.12/site-packages/ray/rllib/core/learner/learner.py”, line 1455, in _update_from_batch_or_episodes
fwd_out, loss_per_module, tensor_metrics = self._update(
^^^^^^^^^^^^^
File “/lib/python3.12/site-packages/ray/util/tracing/tracing_helper.py”, line 463, in _resume_span
return method(self, *_args, **_kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File “/lib/python3.12/site-packages/ray/rllib/core/learner/torch/torch_learner.py”, line 504, in _update
return self._possibly_compiled_update(batch)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File “/lib/python3.12/site-packages/ray/util/tracing/tracing_helper.py”, line 463, in _resume_span
return method(self, *_args, **_kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File “/lib/python3.12/site-packages/ray/rllib/core/learner/torch/torch_learner.py”, line 163, in _uncompiled_update
self.apply_gradients(postprocessed_gradients)
File “/lib/python3.12/site-packages/ray/util/tracing/tracing_helper.py”, line 463, in _resume_span
return method(self, *_args, **_kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File “/lib/python3.12/site-packages/ray/rllib/core/learner/torch/torch_learner.py”, line 259, in apply_gradients
optim.step()
File “/lib/python3.12/site-packages/torch/optim/optimizer.py”, line 493, in wrapper
out = func(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^
File “/lib/python3.12/site-packages/torch/optim/optimizer.py”, line 91, in _use_grad
ret = func(self, *args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^
File “/lib/python3.12/site-packages/torch/optim/adam.py”, line 244, in step
adam(
File “/lib/python3.12/site-packages/torch/optim/optimizer.py”, line 154, in maybe_fallback
return func(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^
File “/lib/python3.12/site-packages/torch/optim/adam.py”, line 876, in adam
func(
File “/lib/python3.12/site-packages/torch/optim/adam.py”, line 516, in _multi_tensor_adam
raise ValueError(
ValueError: beta1 as a Tensor is not supported for capturable=False and foreach=True