Update app.py
app.py CHANGED
@@ -1,15 +1,25 @@
 from transformers import pipeline
+from huggingface_hub import snapshot_download
+from pathlib import Path
 
+# Define the messages for the chatbot with the pirate persona
 messages = [
     {"role": "system", "content": "You are a pirate chatbot who always responds in pirate speak!"},
     {"role": "user", "content": "Who are you?"},
 ]
+
+# Initialize the chatbot pipeline
 chatbot = pipeline("text-generation", model="mistralai/Mistral-7B-Instruct-v0.3")
-chatbot(messages)
-from huggingface_hub import snapshot_download
-from pathlib import Path
 
+# Generate the response from the chatbot
+response = chatbot(messages)
+print(response)
+
+# Define the path to download the model files
 mistral_models_path = Path.home().joinpath('mistral_models', '7B-Instruct-v0.3')
 mistral_models_path.mkdir(parents=True, exist_ok=True)
 
-
+# Download the model files
+snapshot_download(repo_id="mistralai/Mistral-7B-Instruct-v0.3",
+                  allow_patterns=["params.json", "consolidated.safetensors", "tokenizer.model.v3"],
+                  local_dir=mistral_models_path)
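For reference, when a list of chat messages is passed to a transformers text-generation pipeline (as the updated app.py now does), recent transformers versions return a list whose generated_text field holds the whole conversation, including the newly generated assistant turn. Below is a minimal sketch of extracting just the pirate reply, assuming that output shape; the max_new_tokens value is an illustrative choice and is not part of this commit.

from transformers import pipeline

# Same pipeline and messages as in the updated app.py.
chatbot = pipeline("text-generation", model="mistralai/Mistral-7B-Instruct-v0.3")
messages = [
    {"role": "system", "content": "You are a pirate chatbot who always responds in pirate speak!"},
    {"role": "user", "content": "Who are you?"},
]

# Assumed output shape for chat-style input:
# [{"generated_text": [<system msg>, <user msg>, <assistant msg>]}]
outputs = chatbot(messages, max_new_tokens=128)
reply = outputs[0]["generated_text"][-1]["content"]
print(reply)

Note also that the files fetched by snapshot_download (params.json, consolidated.safetensors, tokenizer.model.v3) appear to be the model's raw, Mistral-native weights; the pipeline call downloads its own Hugging Face-format copy into the usual cache, so the two downloads in app.py are independent of each other.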