# Load the tokenizer and model directly from the Hugging Face Hub.
# NOTE: this downloads ~16 GB of weights on first run.
from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("defog/llama-3-sqlcoder-8b")
model = AutoModelForCausalLM.from_pretrained("defog/llama-3-sqlcoder-8b")

# A single-turn chat: one user message, no system prompt.
messages = [
    {"role": "user", "content": "Who are you?"},
]

# Render the chat template, tokenize, and move tensors to the model's device.
# add_generation_prompt=True appends the assistant header so the model
# continues as the assistant; return_dict=True gives us input_ids + attention_mask.
inputs = tokenizer.apply_chat_template(
    messages,
    add_generation_prompt=True,
    tokenize=True,
    return_dict=True,
    return_tensors="pt",
).to(model.device)

outputs = model.generate(**inputs, max_new_tokens=40)

# Slice off the prompt tokens so only the newly generated text is printed.
print(tokenizer.decode(outputs[0][inputs["input_ids"].shape[-1]:]))
A capable language model for text to SQL generation for Postgres, Redshift and Snowflake that is on-par with the most capable generalist frontier models.
Model Description
Developed by: Defog, Inc. Model type: Text to SQL. License: CC-BY-SA-4.0. Finetuned from model: Meta-Llama-3-8B-Instruct.
Demo Page
https://defog.ai/sqlcoder-demo/
Ideal prompt and inference parameters
Set the temperature to 0 and disable sampling (greedy decoding).
Prompt
<|begin_of_text|><|start_header_id|>user<|end_header_id|>
Generate a SQL query to answer this question: `{user_question}`
{instructions}
DDL statements:
{create_table_statements}<|eot_id|><|start_header_id|>assistant<|end_header_id|>
The following SQL query best answers the question `{user_question}`:
```sql
Evaluation
This model was evaluated on SQL-Eval, a PostgreSQL based evaluation framework developed by Defog for testing and alignment of model capabilities.
You can read more about the methodology behind SQL-Eval here.
Contact
Contact us on X at @defogdata, or by email at founders@defog.ai.
- Downloads last month
- 2,131

# Use a pipeline as a high-level helper.
# The text-generation pipeline handles tokenization, chat templating,
# generation, and decoding in one call.
from transformers import pipeline

pipe = pipeline("text-generation", model="defog/llama-3-sqlcoder-8b")

messages = [
    {"role": "user", "content": "Who are you?"},
]

pipe(messages)