Spaces:
Runtime error
Update app.py
app.py CHANGED
@@ -18,9 +18,9 @@ def get_available_free():
     chat_available = False
     pro_sub = False
     try:
-        InferenceClient(m, timeout=10, token=
+        InferenceClient(m, timeout=10, token=HUGGINGFACE_TOKEN).text_generation("Hi.", max_new_tokens=1)
         text_available = True
-        InferenceClient(m, timeout=10, token=
+        InferenceClient(m, timeout=10, token=HUGGINGFACE_TOKEN).chat_completion(messages=[{'role': 'user', 'content': 'Hi.'}], max_tokens=1)
         chat_available = True
     except Exception as e:
         print(e)

@@ -87,7 +87,7 @@ def color_status(api_value, cell_value):
 def search_models(query):
     return display_table(query)

-description = "This is a space that retrieves the status of all supported HF LLM Serverless Inference APIs.\nUpdates every 2 hours
+description = "This is a space that retrieves the status of all supported HF LLM Serverless Inference APIs.\nUpdates every 2 hours!"
 with gr.Blocks() as demo:
     gr.Markdown("## HF Serverless LLM Inference API Status")
     gr.Markdown(description)
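
For context, the probe added in the first hunk can be read as a self-contained helper: a 1-token text_generation call and a 1-token chat_completion call inside a try/except, with any exception leaving the availability flags False. The sketch below is an illustration only, not the Space's actual code: probe_model is a hypothetical name, and HUGGINGFACE_TOKEN is assumed to be defined elsewhere in app.py (here read from the environment).

import os
from huggingface_hub import InferenceClient

# Assumption: app.py defines HUGGINGFACE_TOKEN elsewhere (e.g. a Space secret);
# for this sketch it is read from the environment.
HUGGINGFACE_TOKEN = os.environ.get("HUGGINGFACE_TOKEN")

def probe_model(model_id: str) -> dict:
    # Hypothetical helper mirroring the try/except in get_available_free():
    # one 1-token request per endpoint; any exception leaves the remaining flags False.
    status = {"text_generation": False, "chat_completion": False}
    try:
        client = InferenceClient(model_id, timeout=10, token=HUGGINGFACE_TOKEN)
        client.text_generation("Hi.", max_new_tokens=1)
        status["text_generation"] = True
        client.chat_completion(messages=[{"role": "user", "content": "Hi."}], max_tokens=1)
        status["chat_completion"] = True
    except Exception as e:
        print(e)
    return status

Ordering matters here: text_generation is probed first, so a model whose chat endpoint fails still reports text generation as available, matching the text_available / chat_available flags in the diff.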