update

demo.py CHANGED
@@ -213,7 +213,7 @@ def ensure_client_available():
 def model_prompting(
     llm_model: str,
     prompt: str,
-    max_token_num: Optional[int] =
+    max_token_num: Optional[int] = 2048,
     temperature: Optional[float] = 0.2,
     top_p: Optional[float] = 0.7,
     stream: Optional[bool] = True,
@@ -859,7 +859,7 @@ Keep the description concise and informative. Respond with just the task descrip
 task_description = model_prompting(
     llm_model="meta/llama-3.1-8b-instruct",
     prompt=prompt,
-    max_token_num=
+    max_token_num=2048,
     temperature=0.1,
     top_p=0.9,
     stream=True
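The change gives `max_token_num` a concrete default (2048) in the `model_prompting` signature and passes the same cap explicitly at the `task_description` call site. For context, here is a minimal sketch of what `model_prompting` plausibly wraps, assuming demo.py talks to an OpenAI-compatible endpoint serving `meta/llama-3.1-8b-instruct`; the client setup, base URL, and environment variable below are illustrative assumptions, not taken from demo.py (the real code may build the client in `ensure_client_available()` instead).

```python
import os
from typing import Optional

from openai import OpenAI  # assumption: an OpenAI-compatible client is used


def model_prompting(
    llm_model: str,
    prompt: str,
    max_token_num: Optional[int] = 2048,  # default added by this commit
    temperature: Optional[float] = 0.2,
    top_p: Optional[float] = 0.7,
    stream: Optional[bool] = True,
) -> str:
    # Hypothetical client setup; endpoint and env var name are assumptions.
    client = OpenAI(
        base_url="https://integrate.api.nvidia.com/v1",
        api_key=os.environ["NVIDIA_API_KEY"],
    )
    completion = client.chat.completions.create(
        model=llm_model,
        messages=[{"role": "user", "content": prompt}],
        max_tokens=max_token_num,
        temperature=temperature,
        top_p=top_p,
        stream=stream,
    )
    if stream:
        # Concatenate streamed deltas into a single string.
        return "".join(
            chunk.choices[0].delta.content or "" for chunk in completion
        )
    return completion.choices[0].message.content
```

With the default in place, callers can omit `max_token_num` entirely and still get a 2048-token cap; the edit at line 862 keeps the argument explicit, which makes the limit visible at the point where the prompt is issued.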