AMOP / Dockerfile
# Use a standard Python base image
FROM python:3.10-slim
# Set the working directory inside the container
WORKDIR /code
# Install git and the system packages needed to build the C++ code
RUN apt-get update && apt-get install -y --no-install-recommends \
git \
build-essential \
cmake \
&& rm -rf /var/lib/apt/lists/*
# Clone the llama.cpp repository
RUN git clone https://github.com/ggerganov/llama.cpp.git
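# Note: this tracks whatever llama.cpp's default branch currently points at; pinning a
# release tag keeps builds reproducible, e.g. (the tag name below is a placeholder):
#   RUN git clone --depth 1 --branch <release-tag> https://github.com/ggerganov/llama.cpp.git
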
# Install Python dependencies for llama.cpp first
RUN pip install --no-cache-dir -r llama.cpp/requirements.txt
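# (llama.cpp's root requirements.txt mostly pulls in the packages used by its GGUF
# conversion scripts, e.g. convert_hf_to_gguf.py; this step is assumed to be needed
# because app.py relies on those scripts.)
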
# Build the llama.cpp binaries (explicit Release build, parallelised across all cores)
WORKDIR /code/llama.cpp
RUN cmake -DCMAKE_BUILD_TYPE=Release . \
    && cmake --build . -j "$(nproc)"
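# With this in-source build, the compiled tools (llama-cli, llama-server, etc.) are
# expected to land under /code/llama.cpp/bin/; app.py is assumed to call them from there.
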
# Return to the application directory
WORKDIR /code
# Copy your application's requirements.txt and install its dependencies
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt
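# requirements.txt is assumed to provide gradio (for the web UI served below) along
# with any other runtime dependencies of app.py.
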
# Copy the rest of your application files
COPY . .
# Expose the port Gradio runs on
EXPOSE 7860
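# Inside a container the Gradio app must bind to all interfaces to be reachable, e.g.
# demo.launch(server_name="0.0.0.0", server_port=7860) in app.py (assumed here);
# Gradio's default of 127.0.0.1 would not be reachable through the published port.
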
# The command to run your application
CMD ["python", "app.py"]
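
# To build and run this image locally (the image name "amop" is just an example):
#   docker build -t amop .
#   docker run -p 7860:7860 amop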