Update app.py
app.py CHANGED
@@ -9,7 +9,11 @@ import requests
 
 # Define the device
 device = "cuda" if torch.cuda.is_available() else "cpu"
-
+#Define variables
+temperature=0.4
+max_new_tokens=240
+top_p=0.92
+repetition_penalty=1.7
 
 # Use model IDs as variables
 base_model_id = "tiiuae/falcon-7b-instruct"
@@ -51,7 +55,7 @@ class FalconChatBot:
         return filtered_history
 
 
-    def predict(self, system_prompt, user_message, assistant_message, history, temperature, max_new_tokens, top_p, repetition_penalty):
+    def predict(self, system_prompt, user_message, assistant_message, history, temperature=0.4, max_new_tokens=240, top_p, repetition_penalty):
 
         # Process the history to remove special commands
         processed_history = self.process_history(history)
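Note that the updated signature places the defaulted parameters temperature=0.4 and max_new_tokens=240 before the non-defaulted top_p and repetition_penalty, which Python rejects with "SyntaxError: non-default argument follows default argument". A minimal sketch of a valid signature is below; it assumes the module-level values added in the first hunk (top_p=0.92, repetition_penalty=1.7) are the intended defaults for the remaining parameters, which the commit itself does not state.

    # Hedged sketch, not the committed code: every sampling parameter
    # gets a default so the argument order is valid Python.
    # The values 0.92 and 1.7 are assumed from the module-level
    # constants added earlier in this commit.
    def predict(self, system_prompt, user_message, assistant_message, history,
                temperature=0.4, max_new_tokens=240, top_p=0.92, repetition_penalty=1.7):
        # Process the history to remove special commands
        processed_history = self.process_history(history)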