Update app.py
Browse files
app.py
CHANGED
|
@@ -12,7 +12,7 @@ from api_usage import (
|
|
| 12 |
get_orgs_me,
|
| 13 |
check_key_ant_availability,
|
| 14 |
check_ant_rate_limit,
|
| 15 |
-
check_key_gemini_availability,
|
| 16 |
check_key_azure_availability,
|
| 17 |
get_azure_status,
|
| 18 |
get_azure_deploy,
|
|
@@ -204,19 +204,22 @@ async def get_key_ant_info(key: str, rate_limit: bool, claude_model: str) -> Dic
|
|
| 204 |
|
| 205 |
return info_dict
|
| 206 |
|
| 207 |
-
|
| 208 |
-
#
|
| 209 |
def get_key_gemini_info(key: str) -> Dict[str, Any]:
|
| 210 |
-
"""Gemini ํค ์ ๋ณด๋ฅผ
|
| 211 |
-
|
|
|
|
| 212 |
info_dict = {
|
| 213 |
-
"key": key,
|
|
|
|
| 214 |
"key_availability": key_avai[0],
|
| 215 |
"status": key_avai[1],
|
|
|
|
|
|
|
| 216 |
}
|
| 217 |
return info_dict
|
| 218 |
|
| 219 |
-
|
| 220 |
def get_key_azure_info(endpoint: str, api_key: str) -> Dict[str, Any]:
|
| 221 |
key_avai = check_key_azure_availability(endpoint, api_key)
|
| 222 |
info_dict = {
|
|
@@ -485,15 +488,24 @@ async def process_single_key(key: str, rate_limit: bool, claude_model: str) -> D
|
|
| 485 |
if _key.startswith("sk-"):
|
| 486 |
result = get_key_oai_info(_key)
|
| 487 |
return {"key": _key, **result}
|
|
|
|
|
|
|
|
|
|
| 488 |
if _key.startswith("AIzaSy"):
|
| 489 |
gemini_info = get_key_gemini_info(_key)
|
| 490 |
is_working = gemini_info.get("key_availability") and gemini_info.get("status") == "Working"
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 491 |
result = {
|
| 492 |
-
"key_type": "Google Gemini",
|
| 493 |
**gemini_info,
|
| 494 |
-
"is_gemini_working": is_working
|
|
|
|
| 495 |
}
|
| 496 |
return result
|
|
|
|
| 497 |
if _key.startswith("pst-"):
|
| 498 |
result = get_key_nai_info(_key)
|
| 499 |
return {"key": _key, **result}
|
|
@@ -562,7 +574,10 @@ async def sort_keys(text: str, rate_limit: bool, claude_model: str) -> Tuple[Lis
|
|
| 562 |
|
| 563 |
key_type = result.get("key_type")
|
| 564 |
|
| 565 |
-
|
|
|
|
|
|
|
|
|
|
| 566 |
working_gemini_keys.append(key_value)
|
| 567 |
elif key_type == "OpenAI" and \
|
| 568 |
result.get("key_availability") is True and \
|
|
@@ -645,9 +660,12 @@ This version highlights working OpenAI (with sufficient quota), Anthropic, and G
|
|
| 645 |
max_lines=10,
|
| 646 |
interactive=False,
|
| 647 |
)
|
|
|
|
|
|
|
|
|
|
| 648 |
gemini_keys_output = gr.Textbox(
|
| 649 |
-
label="Working Gemini Keys",
|
| 650 |
-
info="Lists Gemini keys
|
| 651 |
lines=3,
|
| 652 |
max_lines=10,
|
| 653 |
interactive=False,
|
|
@@ -666,5 +684,4 @@ This version highlights working OpenAI (with sufficient quota), Anthropic, and G
|
|
| 666 |
api_name="sort_keys",
|
| 667 |
)
|
| 668 |
|
| 669 |
-
# demo.launch(share=True)
|
| 670 |
demo.launch()
|
|
|
|
| 12 |
get_orgs_me,
|
| 13 |
check_key_ant_availability,
|
| 14 |
check_ant_rate_limit,
|
| 15 |
+
check_key_gemini_availability,
|
| 16 |
check_key_azure_availability,
|
| 17 |
get_azure_status,
|
| 18 |
get_azure_deploy,
|
|
|
|
| 204 |
|
| 205 |
return info_dict
|
| 206 |
|
| 207 |
+
########## ๋ณ๊ฒฝ๋ ๋ถ๋ถ ##########
|
| 208 |
+
# 1. get_key_gemini_info ํจ์๊ฐ ๋ชจ๋ธ ๋ชฉ๋ก์ ํฌํจํ๋๋ก ์์
|
| 209 |
def get_key_gemini_info(key: str) -> Dict[str, Any]:
    """Return info for a Gemini API key, including its available model list."""
    # check_key_gemini_availability is assumed to return a tuple shaped
    # (availability: bool, status: str, models: list) — TODO confirm upstream.
    availability = check_key_gemini_availability(key)
    # Guard against shorter tuples so an older 2-element return shape stays safe.
    models = availability[2] if len(availability) > 2 else []
    return {
        "key": key,
        "key_type": "Google Gemini",
        "key_availability": availability[0],
        "status": availability[1],
        "models": models,
    }
|
| 222 |
|
|
|
|
| 223 |
def get_key_azure_info(endpoint: str, api_key: str) -> Dict[str, Any]:
|
| 224 |
key_avai = check_key_azure_availability(endpoint, api_key)
|
| 225 |
info_dict = {
|
|
|
|
| 488 |
if _key.startswith("sk-"):
|
| 489 |
result = get_key_oai_info(_key)
|
| 490 |
return {"key": _key, **result}
|
| 491 |
+
|
| 492 |
+
########## ๋ณ๊ฒฝ๋ ๋ถ๋ถ ##########
|
| 493 |
+
# 2. process_single_key ํจ์์์ ํน์ ๋ชจ๋ธ ์กด์ฌ ์ฌ๋ถ ํ์ธ ๋ก์ง ์ถ๊ฐ
|
| 494 |
if _key.startswith("AIzaSy"):
|
| 495 |
gemini_info = get_key_gemini_info(_key)
|
| 496 |
is_working = gemini_info.get("key_availability") and gemini_info.get("status") == "Working"
|
| 497 |
+
|
| 498 |
+
# ํน์ ๋ชจ๋ธ("gemini-2.5-pro-preview-06-05")์ด ์ฌ์ฉ ๊ฐ๋ฅํ ๋ชจ๋ธ ๋ชฉ๋ก์ ์๋์ง ํ์ธ
|
| 499 |
+
available_models = gemini_info.get("models", [])
|
| 500 |
+
has_target_model = "gemini-2.5-pro-preview-06-05" in available_models
|
| 501 |
+
|
| 502 |
result = {
|
|
|
|
| 503 |
**gemini_info,
|
| 504 |
+
"is_gemini_working": is_working,
|
| 505 |
+
"has_specific_gemini_model": has_target_model # ๊ฒฐ๊ณผ๋ฅผ ์ ์ฅํ ํ๋๊ทธ ์ถ๊ฐ
|
| 506 |
}
|
| 507 |
return result
|
| 508 |
+
|
| 509 |
if _key.startswith("pst-"):
|
| 510 |
result = get_key_nai_info(_key)
|
| 511 |
return {"key": _key, **result}
|
|
|
|
| 574 |
|
| 575 |
key_type = result.get("key_type")
|
| 576 |
|
| 577 |
+
########## ๋ณ๊ฒฝ๋ ๋ถ๋ถ ##########
|
| 578 |
+
# 3. sort_keys ํจ์์์ Gemini ํค ํํฐ๋ง ์กฐ๊ฑด ๊ฐํ
|
| 579 |
+
# 'is_gemini_working'๊ณผ 'has_specific_gemini_model' ํ๋๊ทธ๋ฅผ ๋ชจ๋ ํ์ธ
|
| 580 |
+
if result.get("is_gemini_working") and result.get("has_specific_gemini_model"):
|
| 581 |
working_gemini_keys.append(key_value)
|
| 582 |
elif key_type == "OpenAI" and \
|
| 583 |
result.get("key_availability") is True and \
|
|
|
|
| 660 |
max_lines=10,
|
| 661 |
interactive=False,
|
| 662 |
)
|
| 663 |
+
|
| 664 |
+
########## ๋ณ๊ฒฝ๋ ๋ถ๋ถ ##########
|
| 665 |
+
# 4. Gemini 결과 상자의 레이블과 설명을 업데이트하여 새로운 필터링 기준을 명시
|
| 666 |
gemini_keys_output = gr.Textbox(
|
| 667 |
+
label="Working Gemini Keys (with gemini-2.5-pro-preview-06-05)",
|
| 668 |
+
info="Lists Gemini keys that are 'Working' AND have 'gemini-2.5-pro-preview-06-05' in their available models.",
|
| 669 |
lines=3,
|
| 670 |
max_lines=10,
|
| 671 |
interactive=False,
|
|
|
|
| 684 |
api_name="sort_keys",
|
| 685 |
)
|
| 686 |
|
|
|
|
| 687 |
demo.launch()
|