alokik29 commited on
Commit
63e6a46
·
verified ·
1 Parent(s): f89f29a

Upload 3 files

Browse files
Files changed (4) hide show
  1. .gitattributes +1 -0
  2. Chinook.db +3 -0
  3. app (1).py +73 -0
  4. requirements (1).txt +11 -0
.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
33
  *.zip filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
 
 
33
  *.zip filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
36
+ Chinook.db filter=lfs diff=lfs merge=lfs -text
Chinook.db ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:7651ba378ac2fcd0dfc3c66fb101f7a7eed3ba39a612ec642b96e20702061f15
3
+ size 1007616
app (1).py ADDED
@@ -0,0 +1,73 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
import sqlite3

import pandas as pd
import gradio as gr
from langchain_community.llms import HuggingFacePipeline
from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline

# ============================================================
# 🚀 Load SQLCoder model
# ============================================================
model_id = "defog/sqlcoder-7b-2"

tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    torch_dtype="auto",   # let transformers pick fp16/bf16 per hardware
    device_map="auto",    # place layers on available GPU(s)/CPU via accelerate
)

# Deterministic decoding (do_sample=False) so the same question always
# produces the same SQL.
# return_full_text=False is the fix: without it the text-generation
# pipeline returns the prompt concatenated with the completion, so the
# "SQL" string handed to sqlite3 would begin with the English prompt
# and every query would fail to execute.
pipe = pipeline(
    "text-generation",
    model=model,
    tokenizer=tokenizer,
    max_new_tokens=256,
    do_sample=False,
    return_full_text=False,
)

# LangChain wrapper so the pipeline can be called as `sqlcoder_llm(prompt)`.
sqlcoder_llm = HuggingFacePipeline(pipeline=pipe)
# ============================================================
# 🧠 Define query function
# ============================================================
def ask_question(user_db, question):
    """Generate SQL for *question* with SQLCoder and run it on the uploaded DB.

    Args:
        user_db: Gradio file wrapper for the uploaded SQLite database
            (``.name`` holds the temp-file path on disk); may be None/falsy
            when nothing was uploaded.
        question: Natural-language question to translate into SQL.

    Returns:
        Tuple ``(sql_query, DataFrame)`` on success, or
        ``(error message, None)`` on any failure.
    """
    if not user_db:
        return "❌ Please upload a database file.", None

    # Create a Text-to-SQL prompt
    prompt = f"Translate this question into an SQLite query:\nQuestion: {question}\nSQL:"
    try:
        # .strip() removes the leading newline/whitespace the model tends
        # to emit before the query text.
        sql_query = sqlcoder_llm(prompt).strip()
    except Exception as e:
        # Generation failed — no DB connection is open yet, nothing to clean up.
        return f"❌ Error executing query: {e}", None

    # Open the connection only once we actually have SQL to run; the
    # original opened it before the LLM call and leaked it if generation
    # raised. try/finally guarantees the close on every path.
    conn = sqlite3.connect(user_db.name)
    try:
        cursor = conn.cursor()
        cursor.execute(sql_query)
        rows = cursor.fetchall()
        columns = [desc[0] for desc in cursor.description]
        return sql_query, pd.DataFrame(rows, columns=columns)
    except Exception as e:
        return f"❌ Error executing query: {e}", None
    finally:
        conn.close()
# ============================================================
# 🎨 Gradio UI
# ============================================================
# Name each component up front, then wire them into the Interface.
db_file = gr.File(label="Upload SQLite Database (.db)")
question_box = gr.Textbox(label="Ask your question")
sql_box = gr.Textbox(label="Generated SQL Query")
result_table = gr.Dataframe(label="Query Result")

# Two inputs (database + question) map onto ask_question's parameters;
# the two outputs receive its (sql, dataframe) return pair.
demo = gr.Interface(
    fn=ask_question,
    inputs=[db_file, question_box],
    outputs=[sql_box, result_table],
    title="🧠 Text-to-SQL on Your Own Database",
    description="Upload your SQLite database and ask natural language questions.",
)

if __name__ == "__main__":
    demo.launch()
requirements (1).txt ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
1
+ gradio
2
+ langchain
3
+ langchain-community
4
+ langchain-huggingface
5
+ transformers
6
+ accelerate
7
+ bitsandbytes
8
+ chromadb
9
+ sqlalchemy
10
+ huggingface_hub
11
+ pandas