ollama-inference / Dockerfile
# Use the official Ollama Docker image as the base image
FROM ollama/ollama:latest
# Install Python 3 and pip in a single layer, cleaning up the apt cache afterwards
RUN apt-get update && \
    apt-get install -y --no-install-recommends python3 python3-pip && \
    rm -rf /var/lib/apt/lists/*
# Create a world-writable directory for Ollama data (the container may run as a non-root user)
RUN mkdir -p /.ollama && chmod -R 777 /.ollama
WORKDIR /.ollama
# Copy the entry point script
COPY entrypoint.sh /entrypoint.sh
RUN chmod +x /entrypoint.sh
# Set the entry point script as the default command
ENTRYPOINT ["/entrypoint.sh"]
# Set the model name as an environment variable (pass it with --build-arg at build time;
# it can also be overridden with -e model=... at run time)
ARG model
ENV model=${model}
# Expose the port that Ollama runs on
EXPOSE 7860
# Default argument passed to the entry point script to start the Ollama server
CMD ["serve"]