# CPU .env docs: https://localai.io/howtos/easy-setup-docker-cpu/
# GPU .env docs: https://localai.io/howtos/easy-setup-docker-gpu/
# Create an app-level token with connections:write scope
SLACK_APP_TOKEN=xapp-1-...
# Install the app into your workspace to grab this token
SLACK_BOT_TOKEN=xoxb-...
# Set this to any random string; the value is not used, but the Python OpenAI library complains if it is missing
OPENAI_API_KEY=sk-foo-bar-baz
# Optional: gpt-3.5-turbo and gpt-4 are currently supported (default: gpt-3.5-turbo)
OPENAI_MODEL=gpt-3.5-turbo
# Optional: Adjust the timeout in seconds for OpenAI calls (default: 30)
OPENAI_TIMEOUT_SECONDS=560
# Directory where the bot persists its memory
MEMORY_DIR=/tmp/memory_dir
# OpenAI-compatible API endpoint (here, the LocalAI service configured below)
OPENAI_API_BASE=http://api:8080/v1
# Sentence-transformers model used to compute embeddings
EMBEDDINGS_MODEL_NAME=all-MiniLM-L6-v2
## Repository and sitemap to index in the vector database at startup
SITEMAP="https://kairos.io/sitemap.xml"
# Optional: comma-separated list of repository names to index (see the commented example below)
# REPOSITORIES="foo,bar"
# # Define the clone URL for "foo"
# foo_CLONE_URL="http://github.com.."
# bar_CLONE_URL="..."
# # Define the branch for "foo"
# foo_BRANCH="master"
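# A minimal sketch of the pattern above for a single repository; the name,
# clone URL, and branch are illustrative assumptions, not required values:
# REPOSITORIES="localai"
# localai_CLONE_URL="https://github.com/go-skynet/LocalAI"
# localai_BRANCH="master"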
# Optional: GitHub token to use when scraping issues
# GITHUB_PERSONAL_ACCESS_TOKEN=""
# ISSUE_REPOSITORIES="go-skynet/LocalAI,foo/bar,..."
# Optional: When the string is "true", this app translates ChatGPT prompts into a user's preferred language (default: true)
USE_SLACK_LANGUAGE=true
# Optional: Adjust the app's logging level (default: DEBUG)
SLACK_APP_LOG_LEVEL=INFO
# Optional: When the string is "true", translate between OpenAI markdown and Slack mrkdwn format (default: false)
TRANSLATE_MARKDOWN=true
### LocalAI
DEBUG=true
MODELS_PATH=/models
IMAGE_PATH=/tmp
# See: https://github.com/go-skynet/model-gallery
PRELOAD_MODELS=[{"url": "github:go-skynet/model-gallery/gpt4all-j.yaml", "name": "gpt-3.5-turbo"}]
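# A commented-out sketch of preloading more than one model: the second gallery
# file below is a placeholder (an assumption, not a known gallery entry),
# mapped to the "gpt-4" alias mentioned above.
# PRELOAD_MODELS=[{"url": "github:go-skynet/model-gallery/gpt4all-j.yaml", "name": "gpt-3.5-turbo"}, {"url": "github:go-skynet/model-gallery/<your-model>.yaml", "name": "gpt-4"}]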