Update maker.py
maker.py CHANGED
@@ -49,27 +49,7 @@ System prompt: As an LLM, your primary function is to deliver hilarious and biti
 Example input: Elon Musk
 """
 
-def
-    """
-    Constructs the input prompt string from the chatbot interactions and the current message.
-    """
-    input_prompt = "<|system|>\n" + system_prompt + "</s>\n<|user|>\n"
-    for interaction in chatbot:
-        input_prompt = input_prompt + str(interaction[0]) + "</s>\n<|assistant|>\n" + str(interaction[1]) + "\n</s>\n<|user|>\n"
-
-    input_prompt = input_prompt + str(message) + "</s>\n<|assistant|>"
-    return input_prompt
-
-
-def post_request_beta(payload):
-    """
-    Sends a POST request to the predefined Tulu endpoint and returns the JSON response.
-    """
-    response = requests.post(tulu, headers=HEADERS, json=payload)
-    response.raise_for_status()  # Will raise an HTTPError if the HTTP request returned an unsuccessful status code
-    return response.json()
-
-def predict_beta(message, chatbot=[], system_prompt=system_prompt, max_new_tokens=1200, temperature=0.4, top_p=0.9, repetition_penalty=0.5, advanced=False):
+def predict_beta(message, chatbot=[], system_prompt=system_prompt, max_new_tokens=1200, temperature=0.4, top_p=0.9, repetition_penalty=0.5, advanced=True):
     client = Client(tulu)
 
     try:
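
For reference, the deleted helper assembled the Tulu chat template by hand. The sketch below reconstructs it as runnable code; the name build_prompt and the exact signature are assumptions, since the diff truncates the original def line.

    # Hypothetical stand-in for the removed helper; name and signature are assumed.
    def build_prompt(message, chatbot, system_prompt):
        """Builds a Tulu-style prompt: a <|system|> block, then alternating
        <|user|>/<|assistant|> turns, each terminated with </s>."""
        prompt = "<|system|>\n" + system_prompt + "</s>\n<|user|>\n"
        for user_turn, assistant_turn in chatbot:
            prompt += (
                str(user_turn) + "</s>\n<|assistant|>\n"
                + str(assistant_turn) + "\n</s>\n<|user|>\n"
            )
        # The trailing, unclosed <|assistant|> tag cues the model to generate the next reply.
        return prompt + str(message) + "</s>\n<|assistant|>"

    # Example: one prior exchange plus the new message.
    history = [("Roast Elon Musk", "The man A/B tests his own children's names.")]
    print(build_prompt("Now do Jeff Bezos", history, system_prompt="You are a roast bot."))

Closing each turn with </s> mirrors the chat format Tulu models are trained on, which is why the helper emitted the tokens explicitly rather than joining turns with plain newlines.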
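
After this commit, predict_beta skips the manual prompt building and raw requests.post call and talks to the endpoint through the gradio_client connection opened with Client(tulu). A minimal sketch of the kind of call that likely sits inside the try block, assuming tulu names a Gradio Space; the parameter order and api_name are assumptions, as the hunk ends before the call:

    from gradio_client import Client

    # Placeholder endpoint; maker.py defines the real `tulu` value.
    tulu = "https://<user>-<space>.hf.space"
    client = Client(tulu)

    # Hypothetical call; the actual parameter order and api_name depend on the Space.
    bot_reply = client.predict(
        "Elon Musk",  # message
        1200,         # max_new_tokens
        0.4,          # temperature
        0.9,          # top_p
        0.5,          # repetition_penalty
        api_name="/predict",
    )
    print(bot_reply)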