Spaces:
Running
Running
adding demo
Browse files- Dockerfile +0 -6
- server/backend.py +42 -0
Dockerfile
CHANGED
@@ -30,18 +30,12 @@ RUN pip install --no-cache-dir -r requirements.txt
|
|
30 |
# Install additional software
|
31 |
RUN curl -fsSL https://ollama.com/install.sh | sh
|
32 |
|
33 |
-
|
34 |
-
|
35 |
# Copy the entire application
|
36 |
COPY . .
|
37 |
|
38 |
# Set proper permissions for the translations directory
|
39 |
RUN chmod -R 777 translations
|
40 |
|
41 |
-
# Start Ollama service
|
42 |
-
RUN systemctl start ollama
|
43 |
|
44 |
-
# Download the required model
|
45 |
-
RUN ollama pull llama3
|
46 |
# Define the command to run the application
|
47 |
CMD ["python", "./run.py"]
|
|
|
30 |
# Install additional software
|
31 |
RUN curl -fsSL https://ollama.com/install.sh | sh
|
32 |
|
|
|
|
|
33 |
# Copy the entire application
|
34 |
COPY . .
|
35 |
|
36 |
# Set proper permissions for the translations directory
|
37 |
RUN chmod -R 777 translations
|
38 |
|
|
|
|
|
39 |
|
|
|
|
|
40 |
# Define the command to run the application
|
41 |
CMD ["python", "./run.py"]
|
server/backend.py
CHANGED
@@ -7,6 +7,47 @@ from server.config import special_instructions
|
|
7 |
from langchain_community.llms import Ollama
|
8 |
import requests
|
9 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
10 |
chatbot_name="Lilly"
|
11 |
prompt: str = """You are a {chatbot_name}, friendly AI companion. You should answer what the user request.
|
12 |
user: {input}
|
@@ -48,6 +89,7 @@ class Backend_Api:
|
|
48 |
api_key = request.json['api_key']
|
49 |
jailbreak = request.json['jailbreak']
|
50 |
model = request.json['model']
|
|
|
51 |
messages = build_messages(jailbreak)
|
52 |
local_mode_1=True
|
53 |
local_model_2 =False
|
|
|
7 |
from langchain_community.llms import Ollama
|
8 |
import requests
|
9 |
|
10 |
+
|
11 |
+
|
12 |
+
|
13 |
+
import os
|
14 |
+
import subprocess
|
15 |
+
|
16 |
+
# List of allowed models
# Only tags on this allow-list may be pulled on demand by download_model();
# anything else coming in from a request is rejected, so client input can
# never trigger an `ollama pull` of an unvetted model.
ALLOWED_MODELS = [
    'llama3',
    'llama3:70b',
    'phi3',
    'mistral',
    'neural-chat',
    'starling-lm',
    'codellama',
    'llama2-uncensored',
    'llava',
    'gemma:2b',
    'gemma:7b',
    'solar',
]

# Directory where models are stored (current directory)
# NOTE(review): `ollama` normally keeps its model store under ~/.ollama,
# not the process working directory — confirm this path matches where
# pulled models actually land.
MODEL_DIR = os.getcwd()
|
34 |
+
|
35 |
+
def is_model_downloaded(model_name):
    """Check if the model is already downloaded.

    The colon in a tag such as ``gemma:7b`` is mapped to an underscore and
    the resulting name is looked up as a filesystem path under MODEL_DIR.

    NOTE(review): `ollama pull` keeps models in its own store (usually
    ~/.ollama), so a path under the working directory may never exist and
    this check could always report False — confirm against where models
    are really written.
    """
    candidate = model_name.replace(':', '_')
    return os.path.exists(os.path.join(MODEL_DIR, candidate))
|
39 |
+
|
40 |
+
def download_model(model_name):
    """Download the model via ``ollama pull`` if it is allowed and missing.

    Args:
        model_name: Ollama model tag requested by the client (e.g. 'llama3').

    Returns:
        None in every case. The function is best-effort: disallowed names,
        already-present models and pull failures are reported on stdout
        rather than raised, so a bad model name cannot crash the caller.
    """
    if model_name not in ALLOWED_MODELS:
        # Reject anything outside the allow-list: model_name comes straight
        # from the request body, so this is the gate against arbitrary pulls.
        print(f"Model {model_name} is not in the list of allowed models.")
        return
    if is_model_downloaded(model_name):
        print(f"Model {model_name} is already downloaded.")
        return
    print(f"Downloading model: {model_name}")
    try:
        # List-form argv (shell=False): model_name is never shell-interpreted.
        subprocess.run(['ollama', 'pull', model_name], check=True)
    except (subprocess.CalledProcessError, FileNotFoundError) as err:
        # Fix: check=True raises on a failed pull, and FileNotFoundError means
        # the `ollama` binary is absent; the original let both propagate into
        # the request handler. Report and continue instead, matching the
        # best-effort behavior of every other branch.
        print(f"Failed to download model {model_name}: {err}")
        return
    print(f"Model {model_name} downloaded successfully.")
|
51 |
chatbot_name="Lilly"
|
52 |
prompt: str = """You are a {chatbot_name}, friendly AI companion. You should answer what the user request.
|
53 |
user: {input}
|
|
|
89 |
api_key = request.json['api_key']
|
90 |
jailbreak = request.json['jailbreak']
|
91 |
model = request.json['model']
|
92 |
+
download_model(model)
|
93 |
messages = build_messages(jailbreak)
|
94 |
local_mode_1=True
|
95 |
local_model_2 =False
|