# Hugging Face Space: Zigistry AI demo
# (removed scraped page banner text: "Spaces: / Sleeping / Sleeping")
import gradio as gr
from huggingface_hub import InferenceClient

# Hugging Face Inference client bound to the SmolLM3-3B chat model.
# NOTE(review): anonymous calls are rate-limited; if the endpoint requires
# authentication, `import os` and pass token=os.getenv("HF_TOKEN") here,
# with HF_TOKEN configured in the Space secrets.
client = InferenceClient(
    model="HuggingFaceTB/SmolLM3-3B",
)
def zigistry_ai(user_query, package_data):
    """Ask the SmolLM3 chat model which packages best match the user's query.

    Args:
        user_query: Free-form question about the packages.
        package_data: Structured package metadata (e.g. a JSON string),
            appended verbatim to the prompt.

    Returns:
        The model's text reply, or a short human-readable message when the
        query is empty or the inference request fails.
    """
    # Guard: an empty prompt wastes an API call and confuses the model.
    if not user_query or not user_query.strip():
        return "Please enter a query."

    messages = [
        {
            "role": "system",
            "content": (
                "You are ZigistryAI, an expert assistant that understands the Zig programming language, "
                "its ecosystem, and package data. You read structured package info and determine the best matches "
                "to a user's request. Always explain reasoning briefly."
            ),
        },
        {
            "role": "user",
            "content": f"{user_query}\nData: {package_data}",
        },
    ]

    try:
        # Low temperature keeps the yes/no filtering answer close to deterministic.
        completion = client.chat.completions.create(
            messages=messages,
            temperature=0.3,
        )
    except Exception as exc:
        # Surface API/network failures in the UI textbox instead of a stack trace.
        return f"Inference request failed: {exc}"

    return completion.choices[0].message.content
# ---- Gradio front-end ---------------------------------------------------
# One query box, one JSON data box, one output box, one submit button.
with gr.Blocks() as demo:
    gr.Markdown("## 🤖 Zigistry AI — Package Language Filter")

    with gr.Row():
        query_box = gr.Textbox(
            label="Your Query",
            value="Which of the following packages are implemented entirely in Zig?",
            lines=2,
        )

    with gr.Row():
        data_box = gr.Textbox(
            label="Package Data (JSON)",
            lines=10,
            value='''[
{
"name": "zig-http",
"description": "Pure Zig HTTP server with no C dependencies",
"language_mix": "zig_only"
},
{
"name": "c-wrapper",
"description": "Zig bindings for a C library",
"language_mix": "zig_and_c"
}
]''',
        )

    with gr.Row():
        answer_box = gr.Textbox(label="AI Response", lines=6)

    ask_button = gr.Button("Ask Zigistry AI")
    ask_button.click(zigistry_ai, inputs=[query_box, data_box], outputs=answer_box)

demo.launch()