Commit 0adb2ff (parent: 364a0a3): "Auto-refresh litellm prices when model not found" — 1 file changed
app.py
CHANGED
|
@@ -868,6 +868,25 @@ def load_all_trajectory_steps(folder: str) -> dict[str, list[dict]]:
|
|
| 868 |
return result
|
| 869 |
|
| 870 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 871 |
def get_litellm_prices_raw() -> dict:
|
| 872 |
"""Get raw litellm prices (all modes, unfiltered)"""
|
| 873 |
global _litellm_prices_cache
|
|
@@ -918,14 +937,9 @@ def normalize_model_name(name: str) -> str:
|
|
| 918 |
return re.sub(r'[-_./]', '', name.lower())
|
| 919 |
|
| 920 |
|
| 921 |
-
def
|
| 922 |
-
|
| 923 |
-
return None
|
| 924 |
-
|
| 925 |
-
prices = get_litellm_prices()
|
| 926 |
-
|
| 927 |
clean_name = model_name.replace("anthropic/", "").replace("openai/", "")
|
| 928 |
-
|
| 929 |
name_without_date = re.sub(r'-\d{8}$', '', clean_name)
|
| 930 |
|
| 931 |
candidates = [
|
|
@@ -957,6 +971,24 @@ def get_model_prices(model_name: str) -> dict | None:
|
|
| 957 |
return None
|
| 958 |
|
| 959 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 960 |
def load_or_download_leaderboard(force_refresh: bool = False):
|
| 961 |
if not force_refresh and LEADERBOARD_CACHE.exists():
|
| 962 |
with open(LEADERBOARD_CACHE) as f:
|
|
|
|
| 868 |
return result
|
| 869 |
|
| 870 |
|
def refresh_litellm_prices() -> bool:
    """Force refresh litellm prices from remote. Returns True if successful."""
    global _litellm_prices_cache, _litellm_chat_prices_cache
    try:
        resp = requests.get(LITELLM_PRICES_URL, timeout=30)
        resp.raise_for_status()

        # Swap in the fresh table and drop the derived chat-only view so it
        # gets rebuilt lazily from the new data.
        _litellm_prices_cache = resp.json()
        _litellm_chat_prices_cache = None

        # Persist to disk so future runs start from the refreshed copy.
        DATA_DIR.mkdir(exist_ok=True)
        with open(LITELLM_PRICES_CACHE, "w") as cache_file:
            json.dump(_litellm_prices_cache, cache_file)
        logging.info("Successfully refreshed litellm prices")
        return True
    except Exception as exc:
        # Best-effort: any failure (network, HTTP status, JSON decode, disk
        # write) leaves the existing cache untouched and reports False.
        logging.warning(f"Failed to refresh litellm prices: {exc}")
        return False
| 890 |
def get_litellm_prices_raw() -> dict:
|
| 891 |
"""Get raw litellm prices (all modes, unfiltered)"""
|
| 892 |
global _litellm_prices_cache
|
|
|
|
| 937 |
return re.sub(r'[-_./]', '', name.lower())
|
| 938 |
|
| 939 |
|
| 940 |
+
def _search_model_in_prices(model_name: str, prices: dict) -> dict | None:
|
| 941 |
+
"""Search for model in prices dict using various name variations."""
|
|
|
|
|
|
|
|
|
|
|
|
|
| 942 |
clean_name = model_name.replace("anthropic/", "").replace("openai/", "")
|
|
|
|
| 943 |
name_without_date = re.sub(r'-\d{8}$', '', clean_name)
|
| 944 |
|
| 945 |
candidates = [
|
|
|
|
| 971 |
return None
|
| 972 |
|
| 973 |
|
def get_model_prices(model_name: str) -> dict | None:
    """Look up pricing info for *model_name*; refresh the cached table on a miss.

    Returns the litellm price entry for the model, or None when the model
    cannot be found even after refreshing the on-disk price cache.
    """
    if not model_name:
        return None

    hit = _search_model_in_prices(model_name, get_litellm_prices())
    if hit is not None:
        return hit

    # A miss against an on-disk cache may just mean the cache is stale and
    # the model is newer than it: pull a fresh table once and retry.
    if LITELLM_PRICES_CACHE.exists():
        logging.info(f"Model '{model_name}' not found in litellm prices, refreshing cache...")
        if refresh_litellm_prices():
            hit = _search_model_in_prices(model_name, get_litellm_prices())
            if hit is None:
                logging.warning(f"Model '{model_name}' still not found after refresh")

    return hit
| 991 |
+
|
| 992 |
def load_or_download_leaderboard(force_refresh: bool = False):
|
| 993 |
if not force_refresh and LEADERBOARD_CACHE.exists():
|
| 994 |
with open(LEADERBOARD_CACHE) as f:
|