arjunanand13 committed
Commit 0feefad • 1 Parent(s): 181902f
Update app.py
app.py CHANGED
@@ -21,8 +21,8 @@ class StopOnTokens(StoppingCriteria):
        return False
 
 # Load the LLaMA model and tokenizer
-
-model_id = 'mistralai/Mistral-7B-Instruct-v0.3'
+model_id = 'meta-llama/Meta-Llama-3-8B-Instruct'
+# model_id = 'mistralai/Mistral-7B-Instruct-v0.3'
 device = f'cuda:{cuda.current_device()}' if cuda.is_available() else 'cpu'
 
 # Set quantization configuration
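The commit only swaps the checkpoint from Mistral-7B-Instruct-v0.3 to Meta-Llama-3-8B-Instruct and keeps the device selection and quantization setup. The rest of app.py is not shown here, so the following is a minimal sketch, assuming the usual transformers + bitsandbytes loading path, of how model_id and the "Set quantization configuration" step are typically wired together. The BitsAndBytesConfig values and the from_pretrained arguments are assumptions for illustration, not code taken from this Space.

# Sketch only: assumed loading path around the changed lines.
from torch import cuda, bfloat16
import transformers

model_id = 'meta-llama/Meta-Llama-3-8B-Instruct'
# model_id = 'mistralai/Mistral-7B-Instruct-v0.3'
device = f'cuda:{cuda.current_device()}' if cuda.is_available() else 'cpu'

# Set quantization configuration (4-bit NF4 values are assumptions, not shown in the diff)
bnb_config = transformers.BitsAndBytesConfig(
    load_in_4bit=True,
    bnb_4bit_quant_type='nf4',
    bnb_4bit_use_double_quant=True,
    bnb_4bit_compute_dtype=bfloat16,
)

# Load the model and tokenizer; device_map='auto' places the quantized
# weights on the available GPU and falls back to CPU otherwise.
tokenizer = transformers.AutoTokenizer.from_pretrained(model_id)
model = transformers.AutoModelForCausalLM.from_pretrained(
    model_id,
    quantization_config=bnb_config,
    device_map='auto',
)
model.eval()

Note that meta-llama/Meta-Llama-3-8B-Instruct is a gated repository on the Hub, so the Space would also need an access token for an account that has accepted the Llama 3 license before from_pretrained can download the weights.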