# Spaces:
# Runtime error
# Runtime error
import os
import gradio as gr
import requests
import inspect
import pandas as pd
from smolagents.models import InferenceClientModel
from smolagents import ToolCallingAgent
from smolagents import DuckDuckGoSearchTool
from smolagents import Tool
import traceback
from huggingface_hub import InferenceClient, HfApi
from huggingface_hub.utils import HfHubHTTPError

# (Keep Constants as is)
# --- Constants ---
DEFAULT_API_URL = "https://agents-course-unit4-scoring.hf.space"

# Read the HF token from the Space secret once; log whether it was found so
# authentication problems surface immediately in the Space logs.
api_key = os.getenv("chatbotagenthf")
print("Token loaded:", bool(api_key))

# One authenticated client/API pair. (The original script created a second,
# UNauthenticated InferenceClient/HfApi pair further down, silently discarding
# the token it had just loaded.)
api = HfApi()
client = InferenceClient(token=api_key)

# Chat/instruction models to probe for accessibility with the current token.
models = [
    "mistralai/Mistral-7B-Instruct-v0.3",
    "HuggingFaceH4/zephyr-7b-beta",
    "google/gemma-2b-it",
    "tiiuae/falcon-7b-instruct",
    "meta-llama/Llama-3.1-8B-Instruct",
]

for model in models:
    print(f"Testing {model}...")
    try:
        # Metadata lookup only: confirms the repo exists and the token is
        # allowed to read it (gated models raise for bad/missing tokens).
        # BUG FIX: the original passed the literal placeholder string
        # "your_HF_token_here" instead of the token loaded above.
        api.model_info(model, token=api_key)
        print(f"Accessible: {model}")
    except HfHubHTTPError as e:
        print(f"Not accessible: {model} -> {e}")