Update app.py
app.py CHANGED
@@ -47,7 +47,7 @@ class PromptRefiner:
         response = self.client.chat.completions.create(
             model="llama-3.2-90b-text-preview",
             messages=messages,
-            max_tokens=
+            max_tokens=8192,
             temperature=0.5
         )
         response_content = response.choices[0].message.content.strip()
@@ -95,7 +95,7 @@ class PromptRefiner:
         response = self.client.chat.completions.create(
             model=model,
             messages=messages,
-            max_tokens=
+            max_tokens=8192,  # Increased token limit
             temperature=0.5
         )

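For context, here is a minimal sketch of how the updated call could sit inside PromptRefiner, assuming an OpenAI-compatible client (e.g. the openai package pointed at a Groq-style endpoint). The class layout, __init__ arguments, and the refine_prompt method name are illustrative assumptions; only the chat.completions.create arguments mirror the committed change. The second hunk applies the same max_tokens bump to the variant where the model name is passed in as a parameter.

# Minimal sketch, not the actual app.py.
# Assumption: an OpenAI-compatible client; base_url and method names are illustrative.
from openai import OpenAI

class PromptRefiner:
    def __init__(self, api_key, base_url=None):
        # base_url is an assumption for targeting a Groq-style OpenAI-compatible endpoint.
        self.client = OpenAI(api_key=api_key, base_url=base_url)

    def refine_prompt(self, messages, model="llama-3.2-90b-text-preview"):
        response = self.client.chat.completions.create(
            model=model,
            messages=messages,
            max_tokens=8192,  # raised token limit from this commit
            temperature=0.5,
        )
        return response.choices[0].message.content.strip()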