While attempting to run the HEBO example with GPU, I hit the error below.
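The cell is essentially the HEBO example from the Ray Tune docs. For context, here is a trimmed sketch of its structure (paraphrased from the docs example rather than pasted verbatim from my cell; the objective, the search space values, and `num_samples` are the docs defaults):

```python
from ray import tune
from ray.air import session
from ray.tune.schedulers import AsyncHyperBandScheduler
from ray.tune.search.hebo import HEBOSearch


def easy_objective(config):
    # Toy objective from the docs example: the score depends on the sampled width/height.
    width, height = config["width"], config["height"]
    for step in range(config["steps"]):
        intermediate_score = (0.1 + width * step / 100) ** (-1) + height * 0.1
        session.report({"iterations": step, "mean_loss": intermediate_score})


algo = HEBOSearch()
scheduler = AsyncHyperBandScheduler()

tuner = tune.Tuner(
    easy_objective,
    tune_config=tune.TuneConfig(
        metric="mean_loss",
        mode="min",
        search_alg=algo,
        scheduler=scheduler,
        num_samples=10,
    ),
    param_space={
        "steps": 100,
        "width": tune.uniform(0, 20),
        "height": tune.uniform(-100, 100),
    },
)
results = tuner.fit()  # <- this is the line that raises (cell line 104 in the traceback)
print("Best hyperparameters found were: ", results.get_best_result().config)
```

Calling `tuner.fit()` fails with the following traceback: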
AttributeError Traceback (most recent call last)
Cell In[3], line 104
83 scheduler = AsyncHyperBandScheduler()
85 tuner = tune.Tuner(
86 easy_objective,
87 tune_config=tune.TuneConfig(
(...)
102 },
103 )
--> 104 results = tuner.fit()
105 print("Best hyperparameters found were: ", results.get_best_result().config)
File /usr/local/lib64/python3.9/site-packages/ray/tune/tuner.py:292, in Tuner.fit(self)
290 if not self._is_ray_client:
291 try:
--> 292 return self._local_tuner.fit()
293 except TuneError as e:
294 raise TuneError(
295 _TUNER_FAILED_MSG.format(
296 path=self._local_tuner.get_experiment_checkpoint_dir()
297 )
298 ) from e
File /usr/local/lib64/python3.9/site-packages/ray/tune/impl/tuner_internal.py:455, in TunerInternal.fit(self)
453 if not self._is_restored:
454 param_space = copy.deepcopy(self._param_space)
--> 455 analysis = self._fit_internal(trainable, param_space)
456 else:
457 analysis = self._fit_resume(trainable)
File /usr/local/lib64/python3.9/site-packages/ray/tune/impl/tuner_internal.py:572, in TunerInternal._fit_internal(self, trainable, param_space)
558 """Fitting for a fresh Tuner."""
559 args = {
560 **self._get_tune_run_arguments(trainable),
561 **dict(
(...)
570 **self._tuner_kwargs,
571 }
--> 572 analysis = run(
573 **args,
574 )
575 self.clear_remote_string_queue()
576 return analysis
File /usr/local/lib64/python3.9/site-packages/ray/tune/tune.py:574, in run(run_or_experiment, name, metric, mode, stop, time_budget_s, config, resources_per_trial, num_samples, local_dir, search_alg, scheduler, keep_checkpoints_num, checkpoint_score_attr, checkpoint_freq, checkpoint_at_end, verbose, progress_reporter, log_to_file, trial_name_creator, trial_dirname_creator, chdir_to_trial_dir, sync_config, export_formats, max_failures, fail_fast, restore, server_port, resume, reuse_actors, raise_on_failed_trial, callbacks, max_concurrent_trials, trial_executor, _experiment_checkpoint_dir, _remote, _remote_string_queue)
572 for i, exp in enumerate(experiments):
573 if not isinstance(exp, Experiment):
--> 574 experiments[i] = Experiment(
575 name=name,
576 run=exp,
577 stop=stop,
578 time_budget_s=time_budget_s,
579 config=config,
580 resources_per_trial=resources_per_trial,
581 num_samples=num_samples,
582 local_dir=local_dir,
583 _experiment_checkpoint_dir=_experiment_checkpoint_dir,
584 sync_config=sync_config,
585 checkpoint_config=checkpoint_config,
586 trial_name_creator=trial_name_creator,
587 trial_dirname_creator=trial_dirname_creator,
588 log_to_file=log_to_file,
589 export_formats=export_formats,
590 max_failures=max_failures,
591 restore=restore,
592 )
593 else:
594 logger.debug("Ignoring some parameters passed into tune.run.")
File /usr/local/lib64/python3.9/site-packages/ray/tune/experiment/experiment.py:188, in Experiment.__init__(self, name, run, stop, time_budget_s, config, resources_per_trial, num_samples, local_dir, _experiment_checkpoint_dir, sync_config, checkpoint_config, trial_name_creator, trial_dirname_creator, log_to_file, export_formats, max_failures, restore)
180 raise ValueError(
181 "'checkpoint_frequency' cannot be set for a function trainable. "
182 "You will need to report a checkpoint every "
(...)
185 "to get this behavior."
186 )
187 try:
--> 188 self._run_identifier = Experiment.register_if_needed(run)
189 except grpc.RpcError as e:
190 if e.code() == grpc.StatusCode.RESOURCE_EXHAUSTED:
File /usr/local/lib64/python3.9/site-packages/ray/tune/experiment/experiment.py:382, in Experiment.register_if_needed(cls, run_object)
380 name = cls.get_trainable_name(run_object)
381 try:
--> 382 register_trainable(name, run_object)
383 except (TypeError, PicklingError) as e:
384 extra_msg = (
385 "Other options: "
386 "\n-Try reproducing the issue by calling "
(...)
389 "the type annotations and try again."
390 )
File /usr/local/lib64/python3.9/site-packages/ray/tune/registry.py:111, in register_trainable(name, trainable, warn)
109 if not issubclass(trainable, Trainable):
110 raise TypeError("Second argument must be convertable to Trainable", trainable)
--> 111 _global_registry.register(TRAINABLE_CLASS, name, trainable)
File /usr/local/lib64/python3.9/site-packages/ray/tune/registry.py:199, in _Registry.register(self, category, key, value)
197 self._to_flush[(category, key)] = pickle.dumps_debug(value)
198 if _internal_kv_initialized():
--> 199 self.flush_values()
File /usr/local/lib64/python3.9/site-packages/ray/tune/registry.py:222, in _Registry.flush_values(self)
219 def flush_values(self):
220 for (category, key), value in self._to_flush.items():
221 _internal_kv_put(
--> 222 _make_key(self._prefix, category, key), value, overwrite=True
223 )
224 self._to_flush.clear()
File /usr/local/lib64/python3.9/site-packages/ray/tune/registry.py:176, in _make_key(prefix, category, key)
159 def _make_key(prefix: str, category: str, key: str):
160 """Generate a binary key for the given category and key.
161
162 Args:
(...)
168 The key to use for storing a the value.
169 """
170 return (
171 b"TuneRegistry:"
172 + prefix.encode("ascii")
173 + b":"
174 + category.encode("ascii")
175 + b"/"
--> 176 + key.encode("ascii")
177 )
AttributeError: 'NoneType' object has no attribute 'encode'
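If I'm reading the traceback right, `register_trainable(name, run_object)` is being called with `name=None` (whatever `Experiment.get_trainable_name()` returned for the objective function), and that `None` eventually reaches `_make_key()`, where `key.encode("ascii")` blows up. A stripped-down illustration of just that last step (my own reconstruction, not Ray code):

```python
# Reproduces only the final failing expression from _make_key() with key=None,
# to show where the AttributeError itself comes from. The prefix/category bytes
# below are placeholders, not the real registry values.
key = None  # this appears to be the trainable `name` that register_trainable() received
b"TuneRegistry:" + b"<prefix>" + b":" + b"<category>" + b"/" + key.encode("ascii")
# AttributeError: 'NoneType' object has no attribute 'encode'
```

So the AttributeError itself looks like a symptom; the part I don't understand is why the trainable's name resolves to None in the first place.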
I did not modify the example code in any way. I'd appreciate some help, as I'm a newcomer to Ray!