Hi @<1523701070390366208:profile|CostlyOstrich36>
here is the full code:
import os
import sys, shutil
import clearml
from clearml import Task, Dataset, Logger
from clearml import PipelineDecorator, PipelineController

project_name = "Titanic Project"
dataset_name = "titanic_data"

# look up the id of the existing dataset version to use as the parent
datasets = Dataset.list_datasets()
for dataset in datasets:
    if dataset["project"] == project_name and dataset["name"] == dataset_name:
        parent_datasets_id = dataset["id"]
print(parent_datasets_id)

# create a new dataset version on top of that parent
dso = Dataset.create(
    dataset_project=project_name,
    dataset_name=dataset_name,
    parent_datasets=[parent_datasets_id],
)

# get the latest completed version (this reassigns dso)
dso = Dataset.get(
    dataset_project=project_name,
    dataset_name=dataset_name,
    only_completed=True,
    only_published=False,
    alias='latest',
)

# pull a mutable local copy into ./data (remove any stale copy first)
if os.path.exists("./data"):
    shutil.rmtree("./data")
local_path = dso.get_mutable_local_copy("./data")
print(local_path)

# append one extra row to the csv
with open("./data/titanic.csv", "a+") as fh:
    fh.write('\n885,0,3,"Sutehaasll, Mr. Henry Jr.",male,45,0,0,SOTON/OQ 392076,7.05,,S\n')

# sync the modified folder back into the dataset, then upload and finalize
op = dso.sync_folder(local_path="./data", verbose=True)
print(op)
dso.finalize(auto_upload=True, verbose=True)
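
For completeness, a minimal sketch of how the uploaded version can be checked afterwards (assuming the finalize above succeeds; the verify / verify_path names are just illustrative, while Dataset.get, list_files and get_local_copy are standard ClearML Dataset calls):

# fetch the latest completed version and confirm the appended csv row made it in
verify = Dataset.get(
    dataset_project=project_name,
    dataset_name=dataset_name,
    only_completed=True,
)
print(verify.id)
print(verify.list_files())
verify_path = verify.get_local_copy()  # read-only cached copy
print(verify_path)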