Update app.py
app.py CHANGED
@@ -7,7 +7,7 @@ from threading import Timer
 HUGGINGFACE_TOKEN = os.environ.get("HUGGINGFACE_TOKEN")
 def get_available_free(use_cache = False):
     if use_cache:
-        if os.path.exists("data.csv"):
+        if os.path.exists(str(os.getcwd())+"/data.csv"):
             print("Loading data from file...")
             return pd.read_csv("data.csv").to_dict(orient='list')
     models_dict = InferenceClient(token=HUGGINGFACE_TOKEN).list_deployed_models("text-generation-inference")
@@ -38,9 +38,9 @@ def get_available_free(use_cache = False):
             pro_sub = True
             if e and "Rate limit reached" in str(e):
                 print("Rate Limited!!")
-                if os.path.exists("data.csv"):
+                if os.path.exists(str(os.getcwd())+"/data.csv"):
                     print("Loading data from file...")
-                    return pd.read_csv("data.csv").to_dict(orient='list')
+                    return pd.read_csv(str(os.getcwd())+"/data.csv").to_dict(orient='list')
                 return []
         try:
             InferenceClient(m, timeout=10).chat_completion(messages=[{'role': 'user', 'content': 'Hi.'}], max_tokens=1)
@@ -53,14 +53,14 @@ def get_available_free(use_cache = False):
                 print("Rate Limited!!")
                 if os.path.exists("data.csv"):
                     print("Loading data from file...")
-                    return pd.read_csv("data.csv").to_dict(orient='list')
+                    return pd.read_csv(str(os.getcwd())+"/data.csv").to_dict(orient='list')
                 return []
         models_conclusion["Model"].append(m)
         models_conclusion["API"].append("Free" if chat_available or text_available else ("Pro Subscription" if pro_sub else "Not Responding"))
         models_conclusion["Chat Completion"].append("---" if (pro_sub or (not chat_available and not text_available)) else ("✅" if chat_available else "❌"))
         models_conclusion["Text Completion"].append("---" if (pro_sub or (not chat_available and not text_available)) else ("✅" if text_available else "❌"))
         models_conclusion["Vision"].append("✅" if vision_available else "❌")
-    pd.DataFrame(models_conclusion).to_csv("data.csv", index=False)
+    pd.DataFrame(models_conclusion).to_csv(str(os.getcwd())+"/data.csv", index=False)
     return models_conclusion
 
 def update_data(use_cache = False):
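The commit prefixes each reference to data.csv with the process's current working directory, so the cache file is read from and written to one absolute location no matter where relative paths happen to resolve inside the Space. A minimal sketch of the same idea follows; the helper names data_csv_path and load_cached_models are illustrative and not part of the original app.py, and os.path.join is used here as an equivalent spelling of str(os.getcwd())+"/data.csv".

import os
import pandas as pd

def data_csv_path():
    # Illustrative helper (not in the original app): resolve data.csv
    # against the current working directory, matching the commit's
    # str(os.getcwd()) + "/data.csv" concatenation.
    return os.path.join(os.getcwd(), "data.csv")

def load_cached_models():
    # Mirrors the cache-read branch of get_available_free(): if the
    # cached table exists, return it as a dict of column lists,
    # otherwise fall back to an empty result.
    if os.path.exists(data_csv_path()):
        print("Loading data from file...")
        return pd.read_csv(data_csv_path()).to_dict(orient='list')
    return []

os.path.join avoids the hand-built "/" separator but yields the same path on the Linux containers Spaces run in; either spelling works as long as the read (pd.read_csv) and the write (DataFrame.to_csv) use the same base directory.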