
RuntimeError: Tune is not installed, so `get_tune_resources` is not supported

Open · NumberChiffre opened this issue 9 months ago · 0 comments

Running the Hyperparameter Tuning example with the code below produced the following error:

Error:

RuntimeError: Tune is not installed, so `get_tune_resources` is not supported. You can install Ray Tune via `pip install ray[tune]`.
---------------------------------------------------------------------------
RuntimeError                              Traceback (most recent call last)
File <command-1840255955610235>:44
     28 config = {
     29     "tree_method": "approx",
     30     "objective": "binary:logistic",
   (...)
     34     "max_depth": tune.randint(1, 9)
     35 }
     37 # Make sure to use the `get_tune_resources` method to set the `resources_per_trial`
     38 analysis = tune.run(
     39     train_model,
     40     config=config,
     41     metric="train-error",
     42     mode="min",
     43     num_samples=4,
---> 44     resources_per_trial=ray_params.get_tune_resources())
     45 print("Best hyperparameters", analysis.best_config)

File /local_disk0/.ephemeral_nfs/envs/pythonEnv-a7c4ad70-7964-4bea-86d4-19be06ce626e/lib/python3.9/site-packages/xgboost_ray/main.py:484, in RayParams.get_tune_resources(self)
    480 if self.cpus_per_actor <= 0 or self.num_actors <= 0:
    481     raise ValueError(
    482         "num_actors and cpus_per_actor both must be " "greater than 0."
    483     )
--> 484 return _get_tune_resources(
    485     num_actors=self.num_actors,
    486     cpus_per_actor=self.cpus_per_actor,
    487     gpus_per_actor=max(0, self.gpus_per_actor),
    488     resources_per_actor=self.resources_per_actor,
    489     placement_options=self.placement_options,
    490 )

File /local_disk0/.ephemeral_nfs/envs/pythonEnv-a7c4ad70-7964-4bea-86d4-19be06ce626e/lib/python3.9/site-packages/xgboost_ray/tune.py:180, in _get_tune_resources(num_actors, cpus_per_actor, gpus_per_actor, resources_per_actor, placement_options)
    178     return placement_group_factory
    179 else:
--> 180     raise RuntimeError(
    181         "Tune is not installed, so `get_tune_resources` is "
    182         "not supported. You can install Ray Tune via `pip "
    183         "install ray[tune]`."
    184     )

RuntimeError: Tune is not installed, so `get_tune_resources` is not supported. You can install Ray Tune via `pip install ray[tune]`.
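Note that the message suggests `pip install ray[tune]`, yet the search space in the code below is built with `ray.tune`, so Tune does import in the driver. As a first step, a quick sanity check (a sketch, not part of the original report) can confirm whether Ray Tune imports in the environment that actually runs the trial code:

# Sanity check (sketch): confirm Ray Tune is importable in the same
# Python environment that executes the trial code. If this raises
# ImportError, install it with `pip install "ray[tune]"` (or
# `%pip install "ray[tune]"` in a Databricks notebook).
try:
    from ray import tune  # noqa: F401
    print("Ray Tune is importable")
except ImportError as exc:
    print(f"Ray Tune is missing: {exc}")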

Code

from xgboost_ray import RayDMatrix, RayParams, train
from sklearn.datasets import load_breast_cancer

num_actors = 4
num_cpus_per_actor = 1

ray_params = RayParams(
    num_actors=num_actors,
    cpus_per_actor=num_cpus_per_actor)

def train_model(config):
    train_x, train_y = load_breast_cancer(return_X_y=True)
    train_set = RayDMatrix(train_x, train_y)

    evals_result = {}
    bst = train(
        params=config,
        dtrain=train_set,
        evals_result=evals_result,
        evals=[(train_set, "train")],
        verbose_eval=False,
        ray_params=ray_params)
    bst.save_model("model.xgb")

from ray import tune

# Specify the hyperparameter search space.
config = {
    "tree_method": "approx",
    "objective": "binary:logistic",
    "eval_metric": ["logloss", "error"],
    "eta": tune.loguniform(1e-4, 1e-1),
    "subsample": tune.uniform(0.5, 1.0),
    "max_depth": tune.randint(1, 9)
}

# Make sure to use the `get_tune_resources` method to set the `resources_per_trial`
analysis = tune.run(
    train_model,
    config=config,
    metric="train-error",
    mode="min",
    num_samples=4,
    resources_per_trial=ray_params.get_tune_resources())
print("Best hyperparameters", analysis.best_config)

NumberChiffre · May 13 '24 18:05