matthoffner committed
Commit 84c9a13 · 1 Parent(s): f10b3f0

Update Dockerfile

Files changed (1)
  1. Dockerfile +3 -2
Dockerfile CHANGED
@@ -1,10 +1,11 @@
 FROM nvidia/cuda:11.6.0-base-ubuntu20.04
 WORKDIR /app
+RUN export PATH="/usr/local/cuda/bin:$PATH"
 RUN apt update && \
 apt install --no-install-recommends -y build-essential python3 python3-pip wget && \
 apt clean && rm -rf /var/lib/apt/lists/*
 
-RUN export PATH="/usr/local/cuda/bin:$PATH"
+
 RUN wget -qO- "https://cmake.org/files/v3.17/cmake-3.17.0-Linux-x86_64.tar.gz" | tar --strip-components=1 -xz -C /usr/local
 RUN CMAKE_ARGS="-DLLAMA_CUBLAS=on" FORCE_CMAKE=1 pip install llama-cpp-python --no-cache-dir
 COPY requirements.txt ./
@@ -20,4 +21,4 @@ COPY --chown=user . $HOME/app
 
 RUN ls -al
 
-CMD ["python", "app.py", "--host", "127.0.0.1", "--port", "7860"]
+CMD ["python3", "app.py", "--host", "127.0.0.1", "--port", "7860"]
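For reference, a sketch of the first hunk of the Dockerfile as it reads after this commit, reconstructed from the diff above (lines outside the shown hunks are elided):

FROM nvidia/cuda:11.6.0-base-ubuntu20.04
WORKDIR /app
# Note: a variable exported in a RUN step only lives in that layer's shell;
# an ENV PATH="/usr/local/cuda/bin:$PATH" instruction would persist it for later steps.
RUN export PATH="/usr/local/cuda/bin:$PATH"
RUN apt update && \
    apt install --no-install-recommends -y build-essential python3 python3-pip wget && \
    apt clean && rm -rf /var/lib/apt/lists/*

RUN wget -qO- "https://cmake.org/files/v3.17/cmake-3.17.0-Linux-x86_64.tar.gz" | tar --strip-components=1 -xz -C /usr/local
RUN CMAKE_ARGS="-DLLAMA_CUBLAS=on" FORCE_CMAKE=1 pip install llama-cpp-python --no-cache-dir
COPY requirements.txt ./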