"""Gradio app: convert natural-language questions into SQL queries.

Loads the HridaAI Hrida-T2SQL-3B model from the Hugging Face Hub and exposes
it behind a simple Gradio text-in / text-out interface.
"""
import gradio as gr
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

# Load the pre-trained model and tokenizer from the Hugging Face Model Hub.
model_id = "HridaAI/Hrida-T2SQL-3B-128k-V0.1"
tokenizer = AutoTokenizer.from_pretrained(model_id, trust_remote_code=True)

# Use fp16 only on GPU: fp16 kernels are often unsupported or very slow on CPU.
_device = "cuda" if torch.cuda.is_available() else "cpu"
_dtype = torch.float16 if _device == "cuda" else torch.float32
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    torch_dtype=_dtype,
    trust_remote_code=True,
).to(_device)
model.eval()  # inference only — disable dropout etc.


def generate_sql(query: str) -> str:
    """Generate a SQL query from a natural-language question.

    Args:
        query: The user's natural-language question.

    Returns:
        The model's decoded output (the generated SQL) as plain text;
        an empty string when the input is empty/blank.
    """
    if not query or not query.strip():
        # Nothing to translate — skip an expensive model call on empty input.
        return ""
    # Tokenize and move tensors to the model's device.
    inputs = tokenizer(query, return_tensors="pt").to(_device)
    # no_grad: inference only — avoid autograd bookkeeping (saves memory/time).
    with torch.no_grad():
        outputs = model.generate(**inputs, max_new_tokens=256)
    # Decode the generated token ids back into a string.
    return tokenizer.decode(outputs[0], skip_special_tokens=True)


# Build the Gradio interface.
iface = gr.Interface(
    fn=generate_sql,
    inputs=gr.Textbox(lines=2, placeholder="Enter your natural language question here..."),
    outputs="text",
    title="Text to SQL Converter",
    description="Convert natural language questions into SQL queries using the Hrida-T2SQL-3B model.",
)

# Launch only when run as a script, not when imported as a module.
if __name__ == "__main__":
    iface.launch()