|
|
import gradio as gr |
|
|
from transformers import AutoModelForCausalLM, AutoTokenizer |
|
|
import torch |
|
|
|
|
|
|
|
|
# HuggingFace model id for the text-to-SQL checkpoint (128k context variant).
model_id = "HridaAI/Hrida-T2SQL-3B-128k-V0.1"

# trust_remote_code=True: the repo ships custom model/tokenizer code.
# NOTE(review): this executes code from the hub repo — acceptable here only
# because the model id is hard-coded to a known repo.
tokenizer = AutoTokenizer.from_pretrained(model_id, trust_remote_code=True)

# Use half precision only when a GPU is available; fp16 inference on CPU is
# unsupported or extremely slow for many ops, so fall back to float32 there.
_dtype = torch.float16 if torch.cuda.is_available() else torch.float32
model = AutoModelForCausalLM.from_pretrained(model_id, torch_dtype=_dtype, trust_remote_code=True)
model.eval()  # inference only: disable dropout etc.
|
|
|
|
|
|
|
|
def generate_sql(query: str) -> str:
    """Convert a natural-language question into a SQL query string.

    Args:
        query: The user's question in plain English.

    Returns:
        The model-generated SQL text (prompt echo stripped). Empty string
        for empty/whitespace-only input.
    """
    # Guard: tokenizing an empty prompt wastes a full generate() call.
    if not query or not query.strip():
        return ""

    # Tokenize and move tensors to wherever the model lives (CPU or GPU).
    inputs = tokenizer(query, return_tensors="pt").to(model.device)

    # Inference only — no autograd graph needed.
    with torch.no_grad():
        outputs = model.generate(**inputs, max_new_tokens=256)

    # generate() returns prompt + completion in one sequence; slice off the
    # prompt tokens so the UI shows only the generated SQL, not the echoed
    # question.
    prompt_len = inputs["input_ids"].shape[-1]
    new_tokens = outputs[0][prompt_len:]

    return tokenizer.decode(new_tokens, skip_special_tokens=True)
|
|
|
|
|
|
|
|
# Input widget: a short free-form text box for the user's question.
question_box = gr.Textbox(lines=2, placeholder="Enter your natural language question here...")

# Wire the model function into a simple one-input / one-output Gradio UI.
iface = gr.Interface(
    fn=generate_sql,
    inputs=question_box,
    outputs="text",
    title="Text to SQL Converter",
    description="Convert natural language questions into SQL queries using the Hrida-T2SQL-3B model.",
)

# Start the Gradio web server (blocks until the process is interrupted).
iface.launch()
|
|
|