1 parent ff57e9e commit a987adf
model_servers/llamacpp_python/cuda/Containerfile
@@ -1,7 +1,10 @@
 FROM quay.io/opendatahub/workbench-images:cuda-ubi9-python-3.9-20231206
+USER root
+RUN dnf install -y gcc-toolset-13-gcc gcc-toolset-13-gcc-c++
+USER 1001
 WORKDIR /locallm
 COPY src .
 ENV CMAKE_ARGS="-DLLAMA_CUBLAS=on -DLLAMA_AVX2=OFF -DLLAMA_FMA=OFF -DLLAMA_F16C=OFF"
 ENV FORCE_CMAKE=1
-RUN pip install --no-cache-dir -r ./requirements.txt
+RUN CC="/opt/rh/gcc-toolset-13/root/usr/bin/gcc" CXX="/opt/rh/gcc-toolset-13/root/usr/bin/g++" pip install --no-cache-dir -r ./requirements.txt
 ENTRYPOINT [ "sh", "run.sh" ]
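For reference, a minimal sketch of building and running the patched image with GPU access. The image tag, published port, and model mount path below are illustrative assumptions, not taken from this commit:

# Build the CUDA llama.cpp model server image (tag "llamacpp-python-cuda" is an assumption)
podman build -t llamacpp-python-cuda -f model_servers/llamacpp_python/cuda/Containerfile .

# Run with NVIDIA GPU access exposed via CDI; the port and model volume are assumptions
podman run --rm -p 8001:8001 --device nvidia.com/gpu=all \
    -v "$(pwd)/models:/locallm/models:Z" llamacpp-python-cuda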