FROM python:latest

ENV PYTHONUNBUFFERED=1
EXPOSE 8000
WORKDIR /app

# Put the CUDA toolchain (expected at /usr/local/cuda) on PATH; ENV persists across layers, unlike `RUN export`
ENV PATH="/usr/local/cuda/bin:$PATH"

# Install CMake 3.17 into /usr/local
RUN wget -qO- "https://cmake.org/files/v3.17/cmake-3.17.0-Linux-x86_64.tar.gz" \
    | tar --strip-components=1 -xz -C /usr/local

# Build llama-cpp-python with cuBLAS support enabled
RUN CMAKE_ARGS="-DLLAMA_CUBLAS=on" FORCE_CMAKE=1 pip install llama-cpp-python --no-cache-dir

# Install Python dependencies before copying the app code so this layer stays cached
COPY requirements.txt ./
RUN pip install --upgrade pip && \
    pip install -r requirements.txt

COPY . .

# Debug aid: list the copied files in the build log
RUN ls -al

# Gradio reads its bind address and port from these environment variables
ENV GRADIO_SERVER_NAME="0.0.0.0" GRADIO_SERVER_PORT="8000"
CMD ["gradio", "app.py"]
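
# A minimal build-and-run sketch, assuming a hypothetical image tag "llama-gradio-app"
# and a host with the NVIDIA Container Toolkit installed so --gpus can expose the GPU:
#
#   docker build -t llama-gradio-app .
#   docker run --gpus all -p 8000:8000 llama-gradio-app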