Update app.py

app.py (CHANGED)
Original app.py (left-hand column of the diff; removed lines are marked with -):

@@ -4,15 +4,15 @@ import requests
 import re
 from typing import List, Dict, Tuple, Any  # type hints added

-#
-#
 from api_usage import (
     get_subscription,
     check_key_availability,
     get_orgs_me,
     check_key_ant_availability,
     check_ant_rate_limit,
-    check_key_gemini_availability,
     check_key_azure_availability,
     get_azure_status,
     get_azure_deploy,
@@ -32,15 +32,16 @@ from api_usage import (
 )

 # ─────────────────────────────────────────
-#
 # ─────────────────────────────────────────
 def get_key_oai_info(key: str) -> Dict[str, Any]:
     session = requests.Session()
-

     info_dict = {
         "key_type": "OpenAI",
-        "key_availability":
         "gpt4_availability": "",
         "gpt4_32k_availability": "",
         "default_org": "",
@@ -51,17 +52,68 @@ def get_key_oai_info(key: str) -> Dict[str, Any]:
         "tokens_per_minute": "",
         "quota": "",
         "all_models": "",
     }

-
         return info_dict

-    if
-
         if status_me == 200:
-

-    subscription_info = get_subscription(key, session,
     if subscription_info:
         info_dict.update(
             {
@@ -73,10 +125,51 @@ def get_key_oai_info(key: str) -> Dict[str, Any]:
                 "models": subscription_info.get("models", ""),
                 "requests_per_minute": subscription_info.get("rpm", ""),
                 "tokens_per_minute": subscription_info.get("tpm", ""),
-                "quota": subscription_info.get("quota", ""),
                 "all_models": subscription_info.get("all_models", ""),
             }
         )
     return info_dict

@@ -111,19 +204,22 @@ async def get_key_ant_info(key: str, rate_limit: bool, claude_model: str) -> Dict[str, Any]:

     return info_dict

-
-#
 def get_key_gemini_info(key: str) -> Dict[str, Any]:
-    """
-
     info_dict = {
-        "key": key,
         "key_availability": key_avai[0],
         "status": key_avai[1],
     }
     return info_dict

-
 def get_key_azure_info(endpoint: str, api_key: str) -> Dict[str, Any]:
     key_avai = check_key_azure_availability(endpoint, api_key)
     info_dict = {
@@ -367,172 +463,156 @@ def not_supported(key: str) -> Dict[str, Any]:


 # ─────────────────────────────────────────
-#
 # ─────────────────────────────────────────
 async def process_single_key(key: str, rate_limit: bool, claude_model: str) -> Dict[str, Any]:
-    """Analyze a single given key and return an info dict. Includes a working-status flag for Gemini keys."""
     _key = key.strip()

     if not _key:
         return {"key": "", "key_type": "Empty", "key_availability": False}

-    # OpenRouter
     if re.match(re.compile(r"sk-or-v1-[a-z0-9]{64}"), _key):
         result = get_key_openrouter_info(_key)
-        return {"key": _key, **result}
-
-    # Anthropic Claude
     if re.match(re.compile(r"sk-ant-api03-[a-zA-Z0-9\-_]{93}AA"), _key) or \
        (_key.startswith("sk-ant-") and len(_key) == 93) or \
        (len(_key) == 89 and re.match(re.compile(r"sk-[a-zA-Z0-9]{86}"), _key)):
         result = await get_key_ant_info(_key, rate_limit, claude_model)
-        return {"key": _key, **result}
-
-    # Stability
     if re.match(re.compile(r"sk-[a-zA-Z0-9]{48}"), _key) and len(_key) == 51 and "T3BlbkFJ" not in _key:
         result = get_key_stability_info(_key)
-        return {"key": _key, **result}
-
-    # Deepseek
     if re.match(re.compile(r"sk-[a-f0-9]{32}"), _key):
         result = get_key_deepseek_info(_key)
-        return {"key": _key, **result}
-
-    # OpenAI (should come after the other sk- patterns)
     if _key.startswith("sk-"):
         result = get_key_oai_info(_key)
-        return {"key": _key, **result}

-
     if _key.startswith("AIzaSy"):
-        gemini_info = get_key_gemini_info(_key)
-        # add a working-status flag (check status)
         is_working = gemini_info.get("key_availability") and gemini_info.get("status") == "Working"
-
-        #
         result = {
-            "key_type": "Google Gemini",  # explicitly add the type info
             **gemini_info,
-            "is_gemini_working": is_working
         }
         return result

-    # NovelAI
     if _key.startswith("pst-"):
         result = get_key_nai_info(_key)
-        return {"key": _key, **result}
-
-    # Replicate
     if (_key.startswith("r8_") and len(_key) == 40) or (_key.islower() and len(_key) == 40):
         result = get_key_replicate_info(_key)
-        return {"key": _key, **result}
-
-    # xAI
     if _key.startswith("xai-"):
         result = get_key_xai_info(_key)
-        return {"key": _key, **result}
-
-    # Azure endpoint: "name:key"
     if len(_key.split(":")) == 2:
         name, potential_key = _key.split(":", 1)
         if re.fullmatch(r'[a-fA-F0-9]{32}', potential_key) and "openai.azure.com" not in name:
             endpoint = f"https://{name}.openai.azure.com/"
             api_key = potential_key
             result = get_key_azure_info(endpoint, api_key)
-            return {"key": _key, **result}
-
-    # Azure endpoint: "https://xxx.openai.azure.com;key"
     if ";" in _key and "openai.azure.com" in _key.split(";")[0]:
         endpoint, api_key = _key.split(";", 1)
         result = get_key_azure_info(endpoint, api_key)
-        return {"key": _key, **result}
-
-    # AWS
     if _key.startswith("AKIA") and len(_key.split(":")[0]) == 20 and _key.split(":")[0].isalnum() and _key.split(":")[0].isupper() and len(_key.split(':')) == 2:
         result = await get_key_aws_info(_key)
-        return {"key": _key, **result}
-
-    # ElevenLabs
     if re.fullmatch(r"[a-f0-9]{32}", _key) or re.fullmatch(r"sk_[a-f0-9]{48}", _key):
         result = get_key_elevenlabs_info(_key)
-        return {"key": _key, **result}
-
-    # Mistral
     if re.fullmatch(r"[a-zA-Z0-9]{32}", _key) and not _key.startswith('sk-'):
         result = get_key_mistral_info(_key)
-        return {"key": _key, **result}
-
-    # Groq
     if re.match(re.compile(r"gsk_[a-zA-Z0-9]{20}WGdyb3FY[a-zA-Z0-9]{24}"), _key):
         result = get_key_groq_info(_key)
-        return {"key": _key, **result}
-
-    # GCP - refresh token
     if re.match(re.compile(r"[\w\-]+:[\w\-@\.]+:.+:.+"), _key):
         parts = _key.split(':')
         if len(parts) >= 4:
             result = await get_key_gcp_info(_key, 0)
-            return {"key": _key, **result}
-
-    # GCP - service account
     if re.match(re.compile(r"[\w\-]+:[\w\-@\.]+:.+\\n"), _key):
         parts = _key.split(':')
         if len(parts) >= 3:
             result = await get_key_gcp_info(_key, 1)
-            return {"key": _key, **result}
-
-    # Not supported
-    result = not_supported(_key)  # includes key_type, key_availability, status
-    # add the key value and return
     return {"key": _key, **result}


 # ─────────────────────────────────────────
-#
 # ─────────────────────────────────────────
-async def sort_keys(text: str, rate_limit: bool, claude_model: str) -> Tuple[List[Dict[str, Any]], str]:
-    """Analyze the keys entered in the text box (one per line) and return full results plus a list of working Gemini keys."""
     keys = [k.strip() for k in text.splitlines() if k.strip()]
     if not keys:
-        return [], ""

     tasks = [process_single_key(k, rate_limit, claude_model) for k in keys]
     results = await asyncio.gather(*tasks)

-    # filter only the working Gemini keys (using the is_gemini_working flag)
     working_gemini_keys = []
     for result in results:
-
-        if
-
-            working_gemini_keys.append(result["key"])

-
-


 # ─────────────────────────────────────────
-# UI
 # ─────────────────────────────────────────
-def clear_inputs():
-    return "", "", ""


 # ─────────────────────────────────────────
-# Gradio
 # ─────────────────────────────────────────
 with gr.Blocks() as demo:
     gr.Markdown(
         """
-        #
-
-
-
-
-
-        * **
-        * **
-        *
         """
     )

@@ -548,47 +628,60 @@ with gr.Blocks() as demo:
             key_box = gr.Textbox(
                 lines=5,
                 max_lines=20,
-                label="API
-                placeholder="
             )
             with gr.Row():
                 claude_model = gr.Dropdown(
                     claude_options,
                     value="claude-3-haiku-20240307",
-                    label="Claude
                     scale=3
                 )
-                rate_limit = gr.Checkbox(label="

             with gr.Row():
-                clear_button = gr.Button("
-                submit_button = gr.Button("

         with gr.Column(scale=2):
-            info = gr.JSON(label="API
-
             gemini_keys_output = gr.Textbox(
-                label="
-                info="
                 lines=3,
                 max_lines=10,
-                interactive=False,
             )

-    # reset the input and output boxes when the Clear button is clicked
     clear_button.click(
-        fn=clear_inputs,
         inputs=None,
-        outputs=[key_box, info, gemini_keys_output]
     )

-    # call sort_keys and distribute the results when the Submit button is clicked
     submit_button.click(
         fn=sort_keys,
         inputs=[key_box, rate_limit, claude_model],
-        outputs=[info, gemini_keys_output],
         api_name="sort_keys",
     )

-
-demo.launch()
Updated app.py (right-hand column of the diff; added lines are marked with +):

@@ -4,15 +4,15 @@ import requests
 import re
 from typing import List, Dict, Tuple, Any  # type hints added

+# The api_usage module is assumed to already exist in the runtime environment.
+# Import the required functions (the corresponding module must be available when the app runs):
 from api_usage import (
     get_subscription,
     check_key_availability,
     get_orgs_me,
     check_key_ant_availability,
     check_ant_rate_limit,
+    check_key_gemini_availability,
     check_key_azure_availability,
     get_azure_status,
     get_azure_deploy,
@@ -32,15 +32,16 @@ from api_usage import (
 )

 # ─────────────────────────────────────────
+# Key-specific helper functions (the OpenAI helper has been modified)
 # ─────────────────────────────────────────
 def get_key_oai_info(key: str) -> Dict[str, Any]:
     session = requests.Session()
+    raw_status_code, org_data_or_error = check_key_availability(session, key)

     info_dict = {
         "key_type": "OpenAI",
+        "key_availability": False,
+        "has_sufficient_quota": True,  # defaults to True; set to False when a problem is found
         "gpt4_availability": "",
         "gpt4_32k_availability": "",
         "default_org": "",
@@ -51,17 +52,68 @@ def get_key_oai_info(key: str) -> Dict[str, Any]:
         "tokens_per_minute": "",
         "quota": "",
         "all_models": "",
+        "status_message": ""
     }

+    org_data_for_subscription = None
+
+    if raw_status_code == 200:
+        info_dict["key_availability"] = True
+        org_data_for_subscription = org_data_or_error
+    elif raw_status_code == 401:  # Unauthorized
+        info_dict["status_message"] = "Unauthorized: Invalid API key."
+        info_dict["has_sufficient_quota"] = False
+        return info_dict
+    elif raw_status_code == 403:  # Forbidden
+        status_me, orgs_me_data = get_orgs_me(session, key)
+        if status_me == 200:
+            info_dict["key_availability"] = True
+            org_data_for_subscription = orgs_me_data
+        else:
+            info_dict["status_message"] = f"Forbidden, and get_orgs_me failed (status: {status_me}). Key might be inactive or lack permissions."
+            info_dict["has_sufficient_quota"] = False
+            return info_dict
+    elif raw_status_code == 429:  # Too Many Requests (rate limit or quota)
+        info_dict["key_availability"] = True   # the key itself may still be valid
+        info_dict["has_sufficient_quota"] = False  # treated as insufficient quota
+        info_dict["status_message"] = "Rate limit or quota likely exceeded (initial check)."
+        if isinstance(org_data_or_error, dict) and "error" in org_data_or_error:
+            error_details = org_data_or_error["error"]
+            current_quota_message = error_details.get("message", "Quota details unavailable from initial check")
+            info_dict["quota"] = current_quota_message
+            if "insufficient_quota" not in current_quota_message.lower():  # if the detail message lacks insufficient_quota, append it to the status message
+                info_dict["status_message"] += f" Error: {current_quota_message}"
+    else:  # any other error
+        info_dict["status_message"] = f"Key check failed (status: {raw_status_code})."
+        if isinstance(org_data_or_error, dict) and "error" in org_data_or_error:
+            info_dict["status_message"] += f" Error: {org_data_or_error['error'].get('message', str(org_data_or_error))}"
+        elif isinstance(org_data_or_error, str):
+            info_dict["status_message"] += f" Details: {org_data_or_error}"
+        info_dict["has_sufficient_quota"] = False
         return info_dict

+    if not info_dict["key_availability"]:
+        # if key_availability is False for any reason other than a 429 error, return immediately
+        if raw_status_code != 429:
+            return info_dict
+
+    if not org_data_for_subscription and info_dict["key_availability"]:
+        status_me, orgs_me_data = get_orgs_me(session, key)
         if status_me == 200:
+            org_data_for_subscription = orgs_me_data
+        else:
+            info_dict["status_message"] = (info_dict["status_message"] + " Could not identify organization for subscription.").strip()
+            info_dict["key_availability"] = False
+            info_dict["has_sufficient_quota"] = False
+            return info_dict
+
+    if not org_data_for_subscription and info_dict["key_availability"]:
+        info_dict["status_message"] = (info_dict["status_message"] + " Organization data for subscription is missing.").strip()
+        info_dict["key_availability"] = False
+        info_dict["has_sufficient_quota"] = False
+        return info_dict

+    subscription_info = get_subscription(key, session, org_data_for_subscription)
     if subscription_info:
         info_dict.update(
             {
@@ -73,10 +125,51 @@ def get_key_oai_info(key: str) -> Dict[str, Any]:
                 "models": subscription_info.get("models", ""),
                 "requests_per_minute": subscription_info.get("rpm", ""),
                 "tokens_per_minute": subscription_info.get("tpm", ""),
+                "quota": subscription_info.get("quota") if subscription_info.get("quota") is not None else info_dict.get("quota", ""),
                 "all_models": subscription_info.get("all_models", ""),
             }
         )
+
+        error_info = subscription_info.get("error")
+        if error_info and isinstance(error_info, dict):
+            err_type = error_info.get("type", "").lower()
+            err_code = error_info.get("code", "").lower()
+            err_msg = error_info.get("message", "").lower()
+
+            if "insufficient_quota" in err_type or \
+               "insufficient_quota" in err_code or \
+               "you exceeded your current quota" in err_msg or \
+               "payment required" in err_msg or \
+               ("billing" in err_msg and "issue" in err_msg):
+                info_dict["has_sufficient_quota"] = False
+                new_quota_message = f"Insufficient: {error_info.get('message', err_type)}"
+                info_dict["quota"] = new_quota_message
+                current_status_msg = info_dict["status_message"]
+                additional_msg = f" Quota/Billing issue from error object: {error_info.get('message', err_type)}"
+                if additional_msg.strip() not in current_status_msg:
+                    info_dict["status_message"] = (current_status_msg + additional_msg).strip()
+
+        if "account_deactivated" in str(subscription_info).lower() or \
+           "payment_failed" in str(subscription_info).lower():
+            info_dict["has_sufficient_quota"] = False
+            current_status_msg = info_dict["status_message"]
+            additional_msg = " Account issue (e.g., deactivated, payment failed) from subscription info."
+            if additional_msg.strip() not in current_status_msg:
+                info_dict["status_message"] = (current_status_msg + additional_msg).strip()
+    else:
+        if info_dict["key_availability"]:
+            info_dict["status_message"] = (info_dict["status_message"] + " Failed to retrieve full subscription details.").strip()
+            info_dict["has_sufficient_quota"] = False
+
+    if info_dict.get("has_sufficient_quota") is True:
+        current_quota_field_value = info_dict.get("quota")
+        if current_quota_field_value:
+            if "insufficient_quota" in str(current_quota_field_value).lower():
+                info_dict["has_sufficient_quota"] = False
+                insufficient_msg = "Insufficient quota indicated in 'quota' field."
+                if insufficient_msg not in info_dict["status_message"]:
+                    info_dict["status_message"] = (info_dict["status_message"] + f" {insufficient_msg}").strip()
+
     return info_dict

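get_key_oai_info above unpacks check_key_availability(session, key) into an HTTP status code plus either the organization payload or an error body, and the 401/403/429 branches rely on that shape. A minimal sketch of the assumed contract follows (hypothetical implementation; the real api_usage helper may differ):

import requests

def check_key_availability(session: requests.Session, key: str):
    # Assumed to return (HTTP status code, parsed JSON body or raw text).
    resp = session.get(
        "https://api.openai.com/v1/models",
        headers={"Authorization": f"Bearer {key}"},
        timeout=10,
    )
    try:
        return resp.status_code, resp.json()
    except ValueError:
        return resp.status_code, resp.text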
@@ -111,19 +204,22 @@ async def get_key_ant_info(key: str, rate_limit: bool, claude_model: str) -> Dict[str, Any]:

     return info_dict

+########## Changed section ##########
+# 1. get_key_gemini_info: modified to also return the model list
 def get_key_gemini_info(key: str) -> Dict[str, Any]:
+    """Fetch Gemini key info, including the list of available models."""
+    # check_key_gemini_availability is assumed to return a (bool, str, list_of_models) tuple
+    key_avai = check_key_gemini_availability(key)
     info_dict = {
+        "key": key,
+        "key_type": "Google Gemini",
         "key_availability": key_avai[0],
         "status": key_avai[1],
+        # fetch the model list safely by checking the length of the returned tuple
+        "models": key_avai[2] if len(key_avai) > 2 else []
     }
     return info_dict

 def get_key_azure_info(endpoint: str, api_key: str) -> Dict[str, Any]:
     key_avai = check_key_azure_availability(endpoint, api_key)
     info_dict = {
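The comment above spells out an assumption: check_key_gemini_availability returns a (bool, str, list_of_models) tuple, while the existing helper may return only (bool, str), which is why the length check guards the "models" field. A compatible sketch under that assumption (endpoint and parsing are illustrative, not the project's confirmed implementation):

import requests

def check_key_gemini_availability(key: str):
    # Returns (is_available, status_text, model_names) for a Gemini API key.
    url = "https://generativelanguage.googleapis.com/v1beta/models"
    try:
        resp = requests.get(url, params={"key": key}, timeout=10)
    except requests.RequestException as exc:
        return False, f"Error: {exc}", []
    if resp.status_code != 200:
        return False, f"HTTP {resp.status_code}", []
    models = [m.get("name", "").removeprefix("models/") for m in resp.json().get("models", [])]
    return True, "Working", models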
@@ -367,172 +463,156 @@ def not_supported(key: str) -> Dict[str, Any]:


 # ─────────────────────────────────────────
+# Single-key asynchronous processing (updated)
 # ─────────────────────────────────────────
 async def process_single_key(key: str, rate_limit: bool, claude_model: str) -> Dict[str, Any]:
     _key = key.strip()

     if not _key:
         return {"key": "", "key_type": "Empty", "key_availability": False}

     if re.match(re.compile(r"sk-or-v1-[a-z0-9]{64}"), _key):
         result = get_key_openrouter_info(_key)
+        return {"key": _key, **result}
     if re.match(re.compile(r"sk-ant-api03-[a-zA-Z0-9\-_]{93}AA"), _key) or \
        (_key.startswith("sk-ant-") and len(_key) == 93) or \
        (len(_key) == 89 and re.match(re.compile(r"sk-[a-zA-Z0-9]{86}"), _key)):
         result = await get_key_ant_info(_key, rate_limit, claude_model)
+        return {"key": _key, **result}
     if re.match(re.compile(r"sk-[a-zA-Z0-9]{48}"), _key) and len(_key) == 51 and "T3BlbkFJ" not in _key:
         result = get_key_stability_info(_key)
+        return {"key": _key, **result}
     if re.match(re.compile(r"sk-[a-f0-9]{32}"), _key):
         result = get_key_deepseek_info(_key)
+        return {"key": _key, **result}
     if _key.startswith("sk-"):
         result = get_key_oai_info(_key)
+        return {"key": _key, **result}

+    ########## Changed section ##########
+    # 2. process_single_key: add a check for the presence of a specific Gemini model
     if _key.startswith("AIzaSy"):
+        gemini_info = get_key_gemini_info(_key)
         is_working = gemini_info.get("key_availability") and gemini_info.get("status") == "Working"
+
+        # check whether the specific model ("gemini-2.5-pro-preview-06-05") is in the list of available models
+        available_models = gemini_info.get("models", [])
+        has_target_model = "gemini-2.5-pro-preview-06-05" in available_models
+
         result = {
             **gemini_info,
+            "is_gemini_working": is_working,
+            "has_specific_gemini_model": has_target_model  # flag saved into the results
         }
         return result

     if _key.startswith("pst-"):
         result = get_key_nai_info(_key)
+        return {"key": _key, **result}
     if (_key.startswith("r8_") and len(_key) == 40) or (_key.islower() and len(_key) == 40):
         result = get_key_replicate_info(_key)
+        return {"key": _key, **result}
     if _key.startswith("xai-"):
         result = get_key_xai_info(_key)
+        return {"key": _key, **result}
     if len(_key.split(":")) == 2:
         name, potential_key = _key.split(":", 1)
         if re.fullmatch(r'[a-fA-F0-9]{32}', potential_key) and "openai.azure.com" not in name:
             endpoint = f"https://{name}.openai.azure.com/"
             api_key = potential_key
             result = get_key_azure_info(endpoint, api_key)
+            return {"key": _key, **result}
     if ";" in _key and "openai.azure.com" in _key.split(";")[0]:
         endpoint, api_key = _key.split(";", 1)
         result = get_key_azure_info(endpoint, api_key)
+        return {"key": _key, **result}
     if _key.startswith("AKIA") and len(_key.split(":")[0]) == 20 and _key.split(":")[0].isalnum() and _key.split(":")[0].isupper() and len(_key.split(':')) == 2:
         result = await get_key_aws_info(_key)
+        return {"key": _key, **result}
     if re.fullmatch(r"[a-f0-9]{32}", _key) or re.fullmatch(r"sk_[a-f0-9]{48}", _key):
         result = get_key_elevenlabs_info(_key)
+        return {"key": _key, **result}
     if re.fullmatch(r"[a-zA-Z0-9]{32}", _key) and not _key.startswith('sk-'):
         result = get_key_mistral_info(_key)
+        return {"key": _key, **result}
     if re.match(re.compile(r"gsk_[a-zA-Z0-9]{20}WGdyb3FY[a-zA-Z0-9]{24}"), _key):
         result = get_key_groq_info(_key)
+        return {"key": _key, **result}
     if re.match(re.compile(r"[\w\-]+:[\w\-@\.]+:.+:.+"), _key):
         parts = _key.split(':')
         if len(parts) >= 4:
             result = await get_key_gcp_info(_key, 0)
+            return {"key": _key, **result}
     if re.match(re.compile(r"[\w\-]+:[\w\-@\.]+:.+\\n"), _key):
         parts = _key.split(':')
         if len(parts) >= 3:
             result = await get_key_gcp_info(_key, 1)
+            return {"key": _key, **result}
+    result = not_supported(_key)
     return {"key": _key, **result}

|
556 |
+
# ๅคไธชkeyๅผๆญฅๅค็ๅฝๆฐ๏ผๅทฒๆดๆฐ๏ผ
|
557 |
# โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
|
558 |
+
async def sort_keys(text: str, rate_limit: bool, claude_model: str) -> Tuple[List[Dict[str, Any]], str, str, str]:
|
|
|
559 |
keys = [k.strip() for k in text.splitlines() if k.strip()]
|
560 |
if not keys:
|
561 |
+
return [], "", "", ""
|
562 |
|
563 |
tasks = [process_single_key(k, rate_limit, claude_model) for k in keys]
|
564 |
results = await asyncio.gather(*tasks)
|
565 |
|
|
|
566 |
working_gemini_keys = []
|
567 |
+
working_oai_keys = []
|
568 |
+
working_anthropic_keys = []
|
569 |
+
|
570 |
for result in results:
|
571 |
+
key_value = result.get("key")
|
572 |
+
if not key_value:
|
573 |
+
continue
|
|
|
574 |
|
575 |
+
key_type = result.get("key_type")
|
576 |
+
|
577 |
+
########## ๆดๆน็้จๅ ##########
|
578 |
+
# 3. sort_keys ๅ ๅผบๅฝๆฐไธญGemini้ฎ็่ฟๆปคๆกไปถ
|
579 |
+
# 'is_gemini_working'ๅ'has_specific_gemini_model' ๆฃๆฅๆๆๆ ๅฟ
|
580 |
+
if result.get("is_gemini_working") and result.get("has_specific_gemini_model"):
|
581 |
+
working_gemini_keys.append(key_value)
|
582 |
+
elif key_type == "OpenAI" and \
|
583 |
+
result.get("key_availability") is True and \
|
584 |
+
result.get("has_sufficient_quota") is True:
|
585 |
+
working_oai_keys.append(key_value)
|
586 |
+
elif key_type == "Anthropic Claude" and result.get("key_availability") is True:
|
587 |
+
working_anthropic_keys.append(key_value)
|
588 |
+
|
589 |
+
return results, "\n".join(working_oai_keys), "\n".join(working_anthropic_keys), "\n".join(working_gemini_keys)
|
590 |
|
591 |
|
592 |
# โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
|
593 |
+
# UI util ๏ผๅทฒๆดๆฐ๏ผ
|
594 |
# โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
|
595 |
+
def clear_inputs():
|
596 |
+
return "", "", "", "", ""
|
597 |
|
598 |
|
599 |
# โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
|
600 |
+
# Gradio UI ๏ผๅทฒๆดๆฐ๏ผ
|
601 |
# โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
|
602 |
with gr.Blocks() as demo:
|
603 |
gr.Markdown(
|
604 |
"""
|
605 |
+
# Multi-API Key Status Checker (OAI, Anthropic, Gemini Enhanced)
|
606 |
+
*(Based on shaocongma, CncAnon1, su, Drago, kingbased key checkers)*
|
607 |
+
Check the status and details of various API keys including OpenAI, Anthropic, Gemini, Azure, Mistral, Replicate, AWS Claude, OpenRouter, Vertex AI (GCP Anthropic), Groq, NovelAI, ElevenLabs, xAI, Stability AI, and DeepSeek.
|
608 |
+
This version highlights working OpenAI (with sufficient quota), Anthropic, and Gemini keys in separate text boxes.
|
609 |
+
|
610 |
+
**Key Formats:**
|
611 |
+
* **AWS:** `AWS_ACCESS_KEY_ID:AWS_SECRET_ACCESS_KEY` (root might not be accurate)
|
612 |
+
* **Azure:** `RESOURCE_NAME:API_KEY` **or** `https://RESOURCE_NAME.openai.azure.com;API_KEY`
|
613 |
+
* **GCP Service Account:** `PROJECT_ID:CLIENT_EMAIL:PRIVATE_KEY` (ensure `\\n` is included for newlines in the key)
|
614 |
+
* **GCP Refresh Token:** `PROJECT_ID:CLIENT_ID:CLIENT_SECRET:REFRESH_TOKEN`
|
615 |
+
* **Other keys:** Standard format provided by the vendor.
|
616 |
"""
|
617 |
)
|
618 |
|
|
|
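The Key Formats list above documents two Azure spellings; both are normalized to the same (endpoint, api_key) pair before get_key_azure_info is called, mirroring the two Azure branches in process_single_key. A compact illustration (the helper name and values are hypothetical):

def _normalize_azure(entry: str):
    # "https://NAME.openai.azure.com;KEY" form
    if ";" in entry and "openai.azure.com" in entry.split(";")[0]:
        endpoint, api_key = entry.split(";", 1)
        return endpoint, api_key
    # "NAME:KEY" form
    name, api_key = entry.split(":", 1)
    return f"https://{name}.openai.azure.com/", api_key

# _normalize_azure("myresource:0123456789abcdef0123456789abcdef")
#   -> ("https://myresource.openai.azure.com/", "0123456789abcdef0123456789abcdef")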
@@ -548,47 +628,60 @@ with gr.Blocks() as demo:
             key_box = gr.Textbox(
                 lines=5,
                 max_lines=20,
+                label="API Key(s) - One per line",
+                placeholder="Enter one API key per line here.",
             )
             with gr.Row():
                 claude_model = gr.Dropdown(
                     claude_options,
                     value="claude-3-haiku-20240307",
+                    label="Claude Model (for filter/concurrent check)",
                     scale=3
                 )
+                rate_limit = gr.Checkbox(label="Check Claude concurrent limit (exp.)", scale=1)

             with gr.Row():
+                clear_button = gr.Button("Clear All")
+                submit_button = gr.Button("Check Keys", variant="primary")

         with gr.Column(scale=2):
+            info = gr.JSON(label="API Key Information (All Results)", open=True)
+            oai_keys_output = gr.Textbox(
+                label="Working OpenAI Keys (Sufficient Quota)",
+                info="Lists OpenAI keys confirmed as working and having sufficient quota.",
+                lines=3,
+                max_lines=10,
+                interactive=False,
+            )
+            anthropic_keys_output = gr.Textbox(
+                label="Working Anthropic Keys",
+                info="Lists Anthropic keys confirmed as working (key_availability is True).",
+                lines=3,
+                max_lines=10,
+                interactive=False,
+            )
+
+            ########## Changed section ##########
+            # 4. update the Gemini output box label and description to reflect the new filter criteria
             gemini_keys_output = gr.Textbox(
+                label="Working Gemini Keys (with gemini-2.5-pro-preview-06-05)",
+                info="Lists Gemini keys that are 'Working' AND have 'gemini-2.5-pro-preview-06-05' in their available models.",
                 lines=3,
                 max_lines=10,
+                interactive=False,
             )

     clear_button.click(
+        fn=clear_inputs,
         inputs=None,
+        outputs=[key_box, info, oai_keys_output, anthropic_keys_output, gemini_keys_output]
     )

     submit_button.click(
         fn=sort_keys,
         inputs=[key_box, rate_limit, claude_model],
+        outputs=[info, oai_keys_output, anthropic_keys_output, gemini_keys_output],
         api_name="sort_keys",
     )

+demo.launch()
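Because submit_button.click registers api_name="sort_keys", the same check can also be driven programmatically once the Space is running. A hedged example using the gradio_client package (the URL is a placeholder and the key strings are dummies):

from gradio_client import Client

client = Client("http://127.0.0.1:7860/")  # or the Hugging Face Space URL
results, oai_keys, anthropic_keys, gemini_keys = client.predict(
    "sk-example\nAIzaSyExample",        # key_box
    False,                              # rate_limit
    "claude-3-haiku-20240307",          # claude_model
    api_name="/sort_keys",
)
print(gemini_keys)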