Hi, can you please elaborate on what you mean and what is happening?
Can you please provide a standalone snippet that reproduces this behavior? Can you also provide a log of the run?
Hello. Please tell me how to make sure that when the pipeline is started, nothing extra gets enqueued on the services queue?
from clearml import TaskTypes
from clearml.automation.controller import PipelineDecorator


@PipelineDecorator.pipeline(
    project=CLEARML_CONFIG.pipeline.project_name,
    name=CLEARML_CONFIG.pipeline.pipeline_name,
)
def executing_pipeline(dataset_id: str):
    print(f"{dataset_id = }")
    weights_path = train_step(dataset_id)
    print(f"{weights_path = }")
    evaluate_step(weights_path)


@PipelineDecorator.component(
    execution_queue=CLEARML_CONFIG.queue,
    docker=CLEARML_CONFIG.docker_image,
    docker_args=CLEARML_CONFIG.docker_arguments,
    return_values=["weights_path"],
    cache=False,
    task_type=TaskTypes.training,
    repo="",
    repo_branch=BRANCH,
)
def train_step(dataset_id: str):
    ...
    return weights_path


@PipelineDecorator.component(
    execution_queue=CLEARML_CONFIG.queue,
    cache=False,
    task_type=TaskTypes.testing,
    docker=CLEARML_CONFIG.docker_image,
    docker_args=CLEARML_CONFIG.docker_arguments,
    parents=["train_step"],
    repo="",
    repo_branch=BRANCH,
)
def evaluate_step(weights_path):
    ...


if __name__ == "__main__":
    # default queue for any component that does not set execution_queue explicitly
    PipelineDecorator.set_default_execution_queue("services")
    executing_pipeline(dataset_id="...")
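For reference, a minimal sketch (building on the snippet above, assuming a recent clearml version) of two ways to keep the pipeline controller itself off the "services" queue; the queue name "pipelines" below is just a placeholder:

from clearml.automation.controller import PipelineDecorator

# Option 1: run the controller logic locally; only the components are enqueued
# on their execution_queue (assumes agents are listening on that queue).
if __name__ == "__main__":
    PipelineDecorator.run_locally()
    executing_pipeline(dataset_id="...")

# Option 2: send the controller task to a dedicated queue instead of "services"
# via the pipeline_execution_queue argument of the pipeline decorator:
# @PipelineDecorator.pipeline(
#     project=CLEARML_CONFIG.pipeline.project_name,
#     name=CLEARML_CONFIG.pipeline.pipeline_name,
#     pipeline_execution_queue="pipelines",  # hypothetical queue name
# )

With either option, only the component tasks should land on CLEARML_CONFIG.queue, and nothing is enqueued on "services" beyond (at most) the controller itself.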