I'm assuming this is related to this thread: [link missing]
Hi, I built my own image and uploaded it to Harbor so I could use it as the image for pipelines and tasks, but the tasks don't use the packages from it.
Dockerfile:
FROM nvidia/cuda:12.4.0-devel-ubuntu22.04
ENV TZ=Europe/Moscow
RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone
WORKDIR /workspace
RUN apt-get update && apt-get install -y \
        software-properties-common \
        curl && \
    add-apt-repository ppa:deadsnakes/ppa -y && \
    apt-get update && apt-get install -y \
        python3.11 \
        python3.11-venv \
        python3.11-dev && \
    rm -rf /var/lib/apt/lists/*
RUN curl -sS None | python3.11
RUN update-alternatives --install /usr/bin/python3 python3 /usr/bin/python3.11 1
RUN python3.11 -m venv /opt/venv
ENV VIRTUAL_ENV=/opt/venv
ENV PATH="$VIRTUAL_ENV/bin:$PATH"
COPY . .
RUN pip install --upgrade pip
RUN pip install torch --index-url None
RUN pip install clearml --index-url None
RUN pip install -r requirements.txt --index-url None
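For completeness, the image is built and pushed to Harbor roughly like this (the registry host, project and tag below are placeholders, not the real values):

docker build -t harbor.example.com/ml/pipeline-image:latest .
docker push harbor.example.com/ml/pipeline-image:latest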
code snippet:
pipe = PipelineController(
    name=config["clear_ml"]["pipeline_name"],
    project=config["clear_ml"]["project_name"],
    version="0.1",
    add_pipeline_tags=False,
    repo=config["clear_ml"]["repo"],
    docker=config["clear_ml"]["docker"],
    packages=config["clear_ml"]["packages"],
)
pipe.set_default_execution_queue("default")
pipe.add_function_step(
    name="load_data",
    function=load_data,
    function_kwargs=dict(config=config["data"]),
    function_return=["train", "test"],
    docker=config["clear_ml"]["docker"],
    packages=config["clear_ml"]["packages"],
    cache_executed_step=True,
)
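For context, the relevant part of the config looks roughly like this (the values below are placeholders, not the real ones), and after the steps are added the controller is started with pipe.start() (exact invocation paraphrased):

config = {
    "clear_ml": {
        "pipeline_name": "example_pipeline",                       # placeholder
        "project_name": "example_project",                         # placeholder
        "repo": "https://github.com/example/repo.git",             # placeholder
        "docker": "harbor.example.com/ml/pipeline-image:latest",   # placeholder image in Harbor
        "packages": ["torch", "clearml"],                          # placeholder list
    },
    "data": {},  # dataset-loading options (contents omitted)
}

pipe.start()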
Problem: ClearML doesn't use the packages installed in the image; it installs packages into its own venv instead. How can I get the steps to use the packages from the Docker image?
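My guess is that the step container needs to be told to skip building its own virtualenv and reuse /opt/venv from the image, maybe by passing agent environment variables through docker_args, but I'm not sure this is the right mechanism (the variable names below are my assumption):

pipe.add_function_step(
    name="load_data",
    function=load_data,
    function_kwargs=dict(config=config["data"]),
    function_return=["train", "test"],
    docker=config["clear_ml"]["docker"],
    # assumption: point the agent at the venv baked into the image instead of creating one
    docker_args="-e CLEARML_AGENT_SKIP_PYTHON_ENV_INSTALL=1 "
                "-e CLEARML_AGENT_SKIP_PIP_VENV_INSTALL=/opt/venv/bin/python",
    cache_executed_step=True,
)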