Update app.py
app.py CHANGED
@@ -48,7 +48,7 @@ class PromptRefiner:
             model="llama-3.2-90b-text-preview",
             messages=messages,
             max_tokens=8192,
-            temperature=0.
+            temperature=0.7
         )
         response_content = response.choices[0].message.content.strip()
         try:
@@ -96,7 +96,7 @@ class PromptRefiner:
             model=model,
             messages=messages,
             max_tokens=8000,  # Increased token limit
-            temperature=0.
+            temperature=0.8
         )

         output = response.choices[0].message.content.strip()
@@ -222,4 +222,4 @@ if __name__ == '__main__':

     prompt_refiner = PromptRefiner(api_key)
     gradio_interface = GradioInterface(prompt_refiner)
-    gradio_interface.launch()
+    gradio_interface.launch(share=True)
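For context, both temperature edits sit inside OpenAI-style chat-completion calls (the diff shows response.choices[0].message.content, which is the OpenAI-compatible response shape). A minimal sketch of such a call follows; the client construction, the Groq endpoint URL, and the variable names are assumptions, since only the argument lines appear in this diff.

from openai import OpenAI

# Assumption: an OpenAI-compatible client pointed at Groq's endpoint, since
# "llama-3.2-90b-text-preview" is served there; the real app may set up its
# client differently.
client = OpenAI(base_url="https://api.groq.com/openai/v1", api_key="YOUR_API_KEY")

response = client.chat.completions.create(
    model="llama-3.2-90b-text-preview",
    messages=[{"role": "user", "content": "Refine this prompt: ..."}],
    max_tokens=8192,
    temperature=0.7,  # higher values make the sampled output more varied
)
print(response.choices[0].message.content.strip())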
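The last hunk only changes the launch call. In Gradio, launch(share=True) opens a temporary public *.gradio.live tunnel in addition to the local server; on a Hugging Face Space the app is already publicly hosted, so the flag mostly matters when running the file locally. A minimal sketch, with a hypothetical refine function standing in for the app's real PromptRefiner logic:

import gradio as gr

def refine(prompt: str) -> str:
    # Hypothetical stand-in for the actual prompt-refinement call
    return prompt.strip()

demo = gr.Interface(fn=refine, inputs="text", outputs="text")
demo.launch(share=True)  # also prints a temporary public share URL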