trying quick fix inference endpoint
app.py CHANGED

@@ -4,6 +4,7 @@ import requests
 import pytz
 import yaml
 from tools.final_answer import FinalAnswerTool
+from huggingface_hub import InferenceClient
 
 from Gradio_UI import GradioUI
 
@@ -35,13 +36,13 @@ def get_current_time_in_timezone(timezone: str) -> str:
 
 
 final_answer = FinalAnswerTool()
-model = HfApiModel(
-    max_tokens=2096,
-    temperature=0.5,
-    model_id='mradermacher/Qwen2.5-Coder-7B-Instruct-i1-GGUF',
-    custom_role_conversions=None,
-)
-
+#model = HfApiModel(
+#    max_tokens=2096,
+#    temperature=0.5,
+#    model_id='mradermacher/Qwen2.5-Coder-7B-Instruct-i1-GGUF',
+#    custom_role_conversions=None,
+#)
+model = InferenceClient("https://jc26mwg228mkj8dw.us-east-1.aws.endpoints.huggingface.cloud/")
 
 # Import tool from Hub
 image_generation_tool = load_tool("agents-course/text-to-image", trust_remote_code=True)
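The change swaps the commented-out HfApiModel for a huggingface_hub.InferenceClient pointed at a dedicated Inference Endpoint URL. Below is a minimal sketch of how such a client is typically exercised against an endpoint; the URL is taken from the diff above, while the prompt and generation parameters are illustrative assumptions, not part of app.py.

# Minimal sketch (assumptions noted inline): calling a dedicated Inference
# Endpoint with huggingface_hub's InferenceClient. The endpoint URL comes from
# the diff above; the prompt and parameters are illustrative only.
from huggingface_hub import InferenceClient

client = InferenceClient("https://jc26mwg228mkj8dw.us-east-1.aws.endpoints.huggingface.cloud/")

# Chat-style call, assuming the endpoint serves a chat/instruct model.
response = client.chat_completion(
    messages=[{"role": "user", "content": "What time is it in UTC?"}],
    max_tokens=256,
    temperature=0.5,
)
print(response.choices[0].message.content)

# Plain text-generation call is also available on InferenceClient.
print(client.text_generation("def fibonacci(n):", max_new_tokens=64))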