# TestLLMGen / app.py.bak — "Testing Natural Lang SQL" Space by VcRlAgent (commit 4335673)
import os
import gradio as gr
from openai import OpenAI
from huggingface_hub import InferenceClient
# Initialize the Hugging Face Inference client.
# NOTE(review): a previous OpenAI-SDK client pointed at
# https://router.huggingface.co/v1 was removed here; InferenceClient is
# the active implementation.
_hf_token = os.environ.get("HF_TOKEN")
if not _hf_token:
    # Fail fast with an actionable message instead of an opaque KeyError.
    raise RuntimeError("HF_TOKEN environment variable must be set")
client = InferenceClient(api_key=_hf_token)
# LLM function
def ask_llm(prompt):
    """Send *prompt* to the chat model and return the reply text.

    On any failure (network, auth, model error) returns a string prefixed
    with "Error: " instead of raising, so the Gradio UI always has
    something to display.
    """
    try:
        completion = client.chat.completions.create(
            model="meta-llama/Llama-3.1-8B-Instruct",
            messages=[{"role": "user", "content": prompt}],
            max_tokens=200,
            temperature=0.7,
        )
        return completion.choices[0].message.content
    except Exception as e:
        # Broad catch is deliberate: surface the failure as text in the UI
        # rather than crashing the demo.
        return f"Error: {str(e)}"
# Build Gradio UI: a single text-in / text-out interface around ask_llm.
demo = gr.Interface(
    fn=ask_llm,
    inputs=gr.Textbox(lines=3, label="Ask the AI"),
    outputs=gr.Textbox(label="Response"),
    title="HF Inference Client LLM Demo",
    description="Powered by HuggingFace InferenceClient SDK.",
)

if __name__ == "__main__":
    # Guard the launch so importing this module (e.g. by the Spaces runtime
    # or in tests) does not start a server as a side effect.
    demo.launch()