Unanswered
Hello everyone,
When performing hyperparameter optimization, is there a way to set an … [question truncated]
@<1523701070390366208:profile|CostlyOstrich36>
"""Launch a ClearML hyperparameter-optimization sweep.

Creates a controller task on the ``services`` queue, then clones a base
training task over a small search space (checkpoint, dataset id, seed),
maximizing the ``Summary/train_accuracy`` scalar on the ``rtx3090`` queue.
"""
from clearml import Task
from clearml.automation import (
    DiscreteParameterRange,
    HyperParameterOptimizer,
    UniformIntegerParameterRange,
)

# Controller task that owns the optimization loop; never reuse a previous run.
task = Task.init(
    project_name="examples",
    task_name="HP optimizer",
    task_type=Task.TaskTypes.optimizer,
    reuse_last_task_id=False,
)
# The controller itself runs remotely on the lightweight "services" queue.
task.execute_remotely(queue_name="services")

# Search space: two candidate checkpoints, one fixed dataset, integer seeds 1-100.
search_space = [
    DiscreteParameterRange(
        "General/checkpoint",
        values=[
            "5a6bea85858241f79a08ef23556e9e37",
            "f64fb9d419d14f11b74d17ebd2c38fd9",
        ],
    ),
    DiscreteParameterRange(
        "General/dataset_id",
        values=["0c5af65c496d45e591e3c475591764dd"],
    ),
    UniformIntegerParameterRange("General/seed", min_value=1, max_value=100),
]

optimizer = HyperParameterOptimizer(
    base_task_id="c7618e30ff5c4955b4942971b410f72d",  # template training task to clone
    hyper_parameters=search_space,
    objective_metric_title="Summary",
    objective_metric_series="train_accuracy",
    objective_metric_sign="max",  # higher accuracy is better
    execution_queue="rtx3090",  # queue the spawned trials run on
    total_max_jobs=3,  # at most 3 trials in total
    save_top_k_tasks_only=2,  # archive all but the best 2 trials
    spawn_project="examples/hpo1",  # project that receives the cloned trials
    max_number_of_concurrent_tasks=2,
)

# Start the sweep, block until every job completes, then shut the optimizer down.
optimizer.start()
optimizer.wait()
optimizer.stop()
85 Views
0
Answers
8 months ago
8 months ago