Spaces:
Runtime error
Runtime error
Upload folder using huggingface_hub
Browse files- .gitignore +52 -0
- README.md +3 -9
- app.py +140 -0
- db.py +245 -0
- db_logging.py +161 -0
- db_setup.py +31 -0
- openai_integration.py +194 -0
- requirements.txt +65 -0
- schema.json +802 -0
- schema.md +195 -0
.gitignore
ADDED
|
@@ -0,0 +1,52 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Byte-compiled / optimized / DLL files
|
| 2 |
+
__pycache__/
|
| 3 |
+
*.py[cod]
|
| 4 |
+
*$py.class
|
| 5 |
+
|
| 6 |
+
# C extensions
|
| 7 |
+
*.so
|
| 8 |
+
|
| 9 |
+
# Packages
|
| 10 |
+
*.egg
|
| 11 |
+
*.egg-info/
|
| 12 |
+
dist/
|
| 13 |
+
build/
|
| 14 |
+
eggs/
|
| 15 |
+
wheels/
|
| 16 |
+
*.egg-info/
|
| 17 |
+
.installed.cfg
|
| 18 |
+
*.egg
|
| 19 |
+
|
| 20 |
+
# Virtual environments
|
| 21 |
+
env/
|
| 22 |
+
venv/
|
| 23 |
+
ENV/
|
| 24 |
+
env.bak/
|
| 25 |
+
venv.bak/
|
| 26 |
+
.venv
|
| 27 |
+
|
| 28 |
+
# SQLite database
|
| 29 |
+
*.db
|
| 30 |
+
|
| 31 |
+
# dotenv environment variables
|
| 32 |
+
.env
|
| 33 |
+
|
| 34 |
+
# Jupyter Notebook checkpoints
|
| 35 |
+
.ipynb_checkpoints
|
| 36 |
+
|
| 37 |
+
# Gradio cache and state files
|
| 38 |
+
gradio_cache/
|
| 39 |
+
gradio_state/
|
| 40 |
+
|
| 41 |
+
# Logs and databases
|
| 42 |
+
*.log
|
| 43 |
+
*.sqlite3
|
| 44 |
+
*.sqlite
|
| 45 |
+
query_logs.db
|
| 46 |
+
|
| 47 |
+
# OS specific files
|
| 48 |
+
.DS_Store
|
| 49 |
+
Thumbs.db
|
| 50 |
+
|
| 51 |
+
# Backup
|
| 52 |
+
BU/
|
README.md
CHANGED
|
@@ -1,12 +1,6 @@
|
|
| 1 |
---
|
| 2 |
-
title:
|
| 3 |
-
emoji: 👁
|
| 4 |
-
colorFrom: red
|
| 5 |
-
colorTo: gray
|
| 6 |
-
sdk: gradio
|
| 7 |
-
sdk_version: 4.44.0
|
| 8 |
app_file: app.py
|
| 9 |
-
|
|
|
|
| 10 |
---
|
| 11 |
-
|
| 12 |
-
Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
|
|
|
|
| 1 |
---
|
| 2 |
+
title: ai_eee_sql_gen
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 3 |
app_file: app.py
|
| 4 |
+
sdk: gradio
|
| 5 |
+
sdk_version: 4.43.0
|
| 6 |
---
|
|
|
|
|
|
app.py
ADDED
|
@@ -0,0 +1,140 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import gradio as gr
|
| 2 |
+
import logging
|
| 3 |
+
|
| 4 |
+
from openai_integration import generate_sql
|
| 5 |
+
from db import get_last_50_saved_queries, initialize_local_db, export_saved_queries_to_csv, execute_sql_query, fetch_and_save_schema, show_last_50_saved_queries # Import the correct function
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
# Initialize logging.
# FIX: the format string was '%(levelname=s' (malformed), which makes
# logging.basicConfig raise ValueError at import time and crashes the Space.
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')

# Ensure the local SQLite query-log table exists before the UI starts.
initialize_local_db()
|
| 13 |
+
|
| 14 |
+
# Function to handle user query input and SQL generation with progress
|
| 15 |
+
def query_database(nl_query, progress=gr.Progress()):
    """Turn a natural-language query into SQL, execute it, and return the artifacts.

    Returns a tuple (reformulated_query, sql_query, rows) where ``rows`` is
    always a non-empty list of lists so gr.Dataframe can render it.
    """
    try:
        progress(0, desc="Starting Query Process")

        # Ask the LLM for a cleaned-up restatement plus the SQL statement.
        progress(0.2, desc="Generating Reformulated Query")
        reformulated_query, sql_query = generate_sql(nl_query)

        # Placeholder shown when execution is skipped or yields nothing.
        placeholder = [["No results available."]]
        rows = placeholder

        # Only run the statement when generation did not report an error.
        if sql_query and not sql_query.startswith("Error"):
            progress(0.5, desc="Executing SQL Query")
            rows = execute_sql_query(sql_query)
            # execute_sql_query returns an error string on failure; guard the
            # Dataframe against non-list or empty results.
            if not isinstance(rows, list) or not rows:
                rows = placeholder

        progress(1, desc="Query Completed")
        return reformulated_query, sql_query, rows

    except Exception as e:
        logging.error(f"Error during query generation or execution: {e}")
        return "Error during query processing.", "", [["No results available due to an error."]]
|
| 43 |
+
|
| 44 |
+
# Function to update the schema when requested
|
| 45 |
+
def update_schema():
    """Pull the latest DB schema into schema.json and report the outcome in the UI."""
    fetched = fetch_and_save_schema()

    # db.py signals a fetch failure by returning {"error": ...}.
    if "error" in fetched:
        raise gr.Error("Error fetching schema from the database.", duration=3)

    # An empty mapping means the fetch ran but found no tables.
    if not fetched:
        raise gr.Error("No schema data was returned. The schema is empty.", duration=3)

    # Success: status string plus a transient toast.
    return "Query executed successfully", gr.Info("DB Schema Updated ℹ️", duration=3)
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
# Function to make hidden components visible after the process
|
| 61 |
+
def continue_process():
    """Reveal the SQL and result panes once a query round-trip has finished."""
    # reformulated_output is always visible, so only two updates are needed.
    return (gr.update(visible=True),
            gr.update(visible=True))
|
| 64 |
+
|
| 65 |
+
# Function to reset the interface to its initial state
|
| 66 |
+
def reset_interface():
    """Restore the initial UI state: clear inputs, hide outputs, disable submit."""
    blank = ""
    return (gr.update(value=blank),            # text_input
            gr.update(value=blank),            # reformulated_output
            gr.update(visible=False),          # sql_output
            gr.update(visible=False),          # sql_result_output
            gr.update(interactive=False))      # start_button
|
| 68 |
+
|
| 69 |
+
# Enable the submit button only when text is entered
|
| 70 |
+
def update_button_state(text):
    """Enable the submit button only while the textbox holds non-blank input."""
    return gr.update(interactive=bool(text.strip()))
|
| 75 |
+
|
| 76 |
+
# Gradio interface setup
|
| 77 |
+
# Gradio interface setup: the whole UI lives inside this Blocks context.
with gr.Blocks(theme=gr.themes.Soft(font=[gr.themes.GoogleFont("Ubuntu"), "Arial", "sans-serif"], text_size='sm')) as ydcoza_face:
    # Free-text natural-language input.
    text_input = gr.Textbox(lines=2, label="Text Query")
    # Canned demo prompts; clicking one fills text_input.
    examples = gr.Examples(examples=[
        "I'm trying to figure out which agents are the busiest. Can you show me like the top few agents who have a lot on their plate? I'd like to see their names and maybe their contact info if we have it.",
        "I need to get an overview of our classes. Could you pull up a list that shows what each class is about and which client it's for? Oh, and it would be great to know how many students are in each class. Maybe order it so the biggest classes are at the top?",
        "Can you give me an overview of all our classes? I'd like to see how many students are in each class and how diverse they are in terms of race. It would be helpful to see the class subject and location too. Don't leave out any classes, even if they have few or no students.",
        "I'm curious about how our classes are doing. Can you show me a list of all the classes and their latest test results or evaluations? It would be helpful to see where each class is located too.",
        "I heard some agents got moved around recently. Can you find out which agents have been switched to different classes? I'd like to know where they were before, where they are now, and maybe why they were moved."
    ],example_labels=["01","02","03","04","05"], label="Demo Natural Language Queries",inputs=[text_input])
    # Output panes: the reformulated query is always visible; SQL and results
    # stay hidden until continue_process reveals them after a run.
    reformulated_output = gr.Textbox(lines=2, label="Optimised Query", elem_id='ydcoza_markdown_output_desc')
    sql_output = gr.Code(label="Generated SQL", visible=False)
    sql_result_output = gr.Dataframe(label="Query Results", elem_id='result_output', visible=False)  # Dataframe for SQL results
    # Disabled until text_input is non-blank (see update_button_state below).
    start_button = gr.Button("Submit Text Query", elem_id='ydcoza_gradio_button', interactive=False)

    # Add reset button to reset the interface
    reset_button = gr.Button("Reset Interface", elem_id='ydcoza_gradio_button_reset')
    reset_button.click(
        fn=reset_interface,
        inputs=[],
        outputs=[text_input, reformulated_output, sql_output, sql_result_output, start_button]
    )
    gr.HTML("""
        <span class="ydcoza_gradio_banner">View The last 50 Queries generated in Table format.</span>
    """)
    # Hidden until the user asks to see the query history.
    saved_queries_output = gr.Dataframe(label="Last 50 Saved Queries", headers=["Query", "Optimised Query", "SQL", "Timestamp"], interactive=True, visible=False)
    # Show the last 50 saved queries when button is clicked
    show_saved_queries_button = gr.Button("View Queries", elem_id='ydcoza_gradio_button')
    show_saved_queries_button.click(show_last_50_saved_queries, outputs=saved_queries_output).then(
        lambda: gr.update(visible=True), outputs=saved_queries_output  # Make the saved queries visible
    )
    gr.HTML("""
        <span class="ydcoza_gradio_banner">Download the generated Queries in .csv for you to explore.</span>
    """)
    csv_file_output = gr.File(label="Download CSV", visible=False)  # Initially hidden
    download_csv_button = gr.Button("Download Queries", elem_id='ydcoza_gradio_button')
    download_csv_button.click(export_saved_queries_to_csv, outputs=csv_file_output).then(
        lambda: gr.update(visible=True), outputs=csv_file_output  # Make the file download visible
    )
    gr.HTML("""
        <span class="ydcoza_gradio_banner">If you made changes to the database structure we need to import the latest DB Schema.</span>
    """)
    # Add a button to pull the latest schema and save it to schema.json
    fetch_schema_button = gr.Button("Fetch Latest Schema", elem_id='ydcoza_gradio_button')
    # fetch_schema_button.click(update_schema, outputs=[gr.Textbox(label="Schema Update Status")])
    # update_schema reports via gr.Error / gr.Info toasts, so no output component is wired.
    fetch_schema_button.click(update_schema)

    # Setup the button click to trigger the process and show results.
    # Chain: run the query, then reveal the hidden panes, then disable submit
    # until the input changes again.
    text_input.change(fn=update_button_state, inputs=text_input, outputs=start_button)
    start_button.click(
        fn=query_database,
        inputs=[text_input],
        outputs=[reformulated_output, sql_output, sql_result_output]
    ).then(
        continue_process,
        outputs=[sql_output, sql_result_output]
    ).then(
        lambda: gr.update(interactive=False), outputs=start_button  # Disable the submit button after submission
    )

# Launch the Gradio interface
if __name__ == "__main__":
    # ydcoza_face.launch(auth=auth_users, auth_message="Demo Login, Username: admin & Password: 1234")
    # run gradio deploy in Terminal
    ydcoza_face.launch()
|
db.py
ADDED
|
@@ -0,0 +1,245 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
import json
|
| 3 |
+
import psycopg2
|
| 4 |
+
from psycopg2 import pool
|
| 5 |
+
import sqlite3
|
| 6 |
+
from dotenv import load_dotenv
|
| 7 |
+
import logging
|
| 8 |
+
import csv
|
| 9 |
+
|
| 10 |
+
# Load environment variables
load_dotenv()

# Initialize logging
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')

# PostgreSQL connection pool setup.
# NOTE(review): the pool is created at import time, so a missing/unreachable
# database makes this module fail to import. Credentials come from lowercase
# env vars (db_user, db_password, ...) while the SQLite path below uses
# uppercase DB_PATH — confirm the .env defines both spellings as expected.
db_pool = psycopg2.pool.SimpleConnectionPool(
    minconn=1,
    maxconn=5,
    user=os.getenv("db_user"),
    password=os.getenv("db_password"),
    host=os.getenv("db_host"),
    dbname=os.getenv("db_name"),
    port=os.getenv("db_port", 5432)
)

# SQLite DB path for the local query log (defaults to ./query_logs.db)
db_path = os.getenv("DB_PATH", "./query_logs.db")
|
| 29 |
+
|
| 30 |
+
# PostgreSQL: Function to fetch schema and save to schema.json
|
| 31 |
+
def fetch_and_save_schema():
    """Introspect the public schema of the PostgreSQL database and cache it.

    Collects every public table's comment, its columns (name, type, comment)
    and its foreign keys, writes the result to schema.json, and returns the
    same dict. On failure returns {"error": "<message>"} instead of raising.
    """
    conn = None
    try:
        logging.info("Fetching schema from the database...")

        conn = db_pool.getconn()
        cursor = conn.cursor()

        # All public tables together with their table-level comments.
        cursor.execute("""
            SELECT table_name, obj_description(('public.' || table_name)::regclass) as table_comment
            FROM information_schema.tables
            WHERE table_schema = 'public';
        """)
        tables = cursor.fetchall()

        schema_info = {}

        for table_name, table_comment in tables:
            schema_info[table_name] = {
                "comment": table_comment,
                "columns": [],
                "foreign_keys": []
            }

            # Column details and comments for this table.
            # FIX: parameterized (%s) instead of f-string interpolation, which
            # broke on quoted table names and was SQL-injectable in principle.
            cursor.execute("""
                SELECT
                    c.column_name,
                    c.data_type,
                    col_description(('public.' || c.table_name)::regclass, ordinal_position) as column_comment
                FROM information_schema.columns c
                WHERE c.table_name = %s;
            """, (table_name,))
            for column_name, data_type, column_comment in cursor.fetchall():
                schema_info[table_name]["columns"].append({
                    "name": column_name,
                    "data_type": data_type,
                    "comment": column_comment
                })

            # Foreign-key relationships for this table (also parameterized).
            cursor.execute("""
                SELECT
                    kcu.column_name,
                    ccu.table_name AS foreign_table_name,
                    ccu.column_name AS foreign_column_name
                FROM information_schema.table_constraints AS tc
                JOIN information_schema.key_column_usage AS kcu
                    ON tc.constraint_name = kcu.constraint_name
                    AND tc.table_schema = kcu.table_schema
                JOIN information_schema.constraint_column_usage AS ccu
                    ON ccu.constraint_name = tc.constraint_name
                WHERE tc.constraint_type = 'FOREIGN KEY' AND tc.table_name = %s;
            """, (table_name,))
            for column_name, foreign_table_name, foreign_column_name in cursor.fetchall():
                schema_info[table_name]["foreign_keys"].append({
                    "column": column_name,
                    "references": {
                        "table": foreign_table_name,
                        "column": foreign_column_name
                    }
                })

        cursor.close()

        # Persist a pretty-printed copy for the LLM prompt builder.
        with open("schema.json", "w") as schema_file:
            json.dump(schema_info, schema_file, indent=2)

        logging.info("Schema fetched and saved to schema.json.")
        return schema_info
    except Exception as e:
        logging.error(f"Error fetching schema: {e}")
        return {"error": str(e)}
    finally:
        # FIX: always hand the connection back to the pool; previously it was
        # leaked whenever any query above raised.
        if conn is not None:
            db_pool.putconn(conn)
|
| 111 |
+
|
| 112 |
+
# PostgreSQL: Function to execute SQL query
|
| 113 |
+
def execute_sql_query(sql_query):
    """Run *sql_query* against PostgreSQL and return [headers, *rows].

    On success the first element is the list of column names and the rest are
    result tuples — the shape gr.Dataframe expects. On failure the error
    message is returned as a plain string (callers type-check the result).
    """
    conn = None
    try:
        conn = db_pool.getconn()
        cursor = conn.cursor()
        cursor.execute(sql_query)  # Execute the (LLM-generated) query
        result = cursor.fetchall()

        # Header row from the cursor metadata.
        column_names = [desc[0] for desc in cursor.description]
        cursor.close()

        # Format the result as a list of lists for Gradio Dataframe.
        return [column_names] + result
    except Exception as e:
        logging.error(f"Error executing SQL query: {e}")
        return str(e)
    finally:
        # FIX: return the connection to the pool even when execution fails;
        # previously every bad query leaked one of the pool's 5 connections.
        if conn is not None:
            db_pool.putconn(conn)
|
| 131 |
+
|
| 132 |
+
# SQLite: Initialize the local SQLite database
|
| 133 |
+
def initialize_local_db():
    """Create the SQLite query_logs table if it does not exist yet."""
    conn = sqlite3.connect(db_path)
    try:
        cursor = conn.cursor()
        cursor.execute('''
            CREATE TABLE IF NOT EXISTS query_logs (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                natural_language_query TEXT,
                reformulated_query TEXT,
                generated_sql TEXT,
                created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
            );
        ''')
        conn.commit()
        cursor.close()
    finally:
        # FIX: close even if table creation fails so the DB file is not left open.
        conn.close()
|
| 150 |
+
|
| 151 |
+
# SQLite: Function to save the query to the local database
|
| 152 |
+
def save_query_to_local_db(nl_query, reformulated_query, sql_query):
    """Append one generated query (NL text, reformulation, SQL) to the local log.

    Best-effort: failures are logged, never raised, so a logging problem
    cannot break the main query flow.
    """
    try:
        conn = sqlite3.connect(db_path)
        try:
            cursor = conn.cursor()
            cursor.execute('''
                INSERT INTO query_logs (natural_language_query, reformulated_query, generated_sql)
                VALUES (?, ?, ?);
            ''', (nl_query, reformulated_query, sql_query))
            conn.commit()
            cursor.close()
        finally:
            conn.close()  # FIX: close even when the INSERT fails (was leaked)
    except Exception as e:
        logging.error(f"Error saving query: {e}")
|
| 166 |
+
|
| 167 |
+
# SQLite: Function to get the last 50 saved queries
|
| 168 |
+
def get_last_50_saved_queries():
    """Return the 50 most recent query_logs rows, newest first.

    Rows are (natural_language_query, reformulated_query, generated_sql,
    created_at); on failure the error message is returned as a string.
    """
    try:
        conn = sqlite3.connect(db_path)
        try:
            cursor = conn.cursor()
            cursor.execute(
                "SELECT natural_language_query, reformulated_query, generated_sql, created_at "
                "FROM query_logs ORDER BY created_at DESC LIMIT 50;"
            )
            rows = cursor.fetchall()
            cursor.close()
            return rows
        finally:
            conn.close()  # FIX: close even when the SELECT raises (was leaked)
    except Exception as e:
        logging.error(f"Error retrieving saved queries: {e}")
        return str(e)
|
| 181 |
+
|
| 182 |
+
# SQLite: Function to export saved queries to a CSV file
|
| 183 |
+
def export_saved_queries_to_csv(file_path="./saved_queries.csv"):
    """Dump every logged query to a CSV file and return its path.

    On failure returns the error message as a string instead of a path.
    """
    try:
        conn = sqlite3.connect(db_path)
        try:
            cursor = conn.cursor()
            # Fetch all saved queries, newest first.
            cursor.execute("SELECT natural_language_query, reformulated_query, generated_sql, created_at FROM query_logs ORDER BY created_at DESC;")
            rows = cursor.fetchall()
            cursor.close()
        finally:
            conn.close()  # FIX: close even when the SELECT raises (was leaked)

        # FIX: explicit utf-8 keeps non-ASCII query text portable across platforms.
        with open(file_path, 'w', newline='', encoding='utf-8') as csvfile:
            csv_writer = csv.writer(csvfile)
            csv_writer.writerow(['Natural Language Query', 'Reformulated Query', 'Generated SQL', 'Timestamp'])
            csv_writer.writerows(rows)

        return file_path
    except Exception as e:
        logging.error(f"Error exporting queries to CSV: {e}")
        return str(e)
|
| 205 |
+
|
| 206 |
+
def show_last_50_saved_queries():
    """UI-facing alias: fetch the 50 most recent logged queries.

    Identical behavior to get_last_50_saved_queries(); kept as a separate
    name because app.py imports both, but implemented by delegation so the
    previously copy-pasted bodies cannot drift out of sync.
    """
    return get_last_50_saved_queries()
|
| 219 |
+
|
| 220 |
+
|
| 221 |
+
# Function to reset (drop all tables) and recreate the schema
|
| 222 |
+
def reset_sqlite_db():
    """Drop and recreate the query_logs table, destroying all logged queries.

    Maintenance helper — only invoked manually (see the commented call below).
    """
    conn = sqlite3.connect(db_path)
    try:
        cursor = conn.cursor()

        # Drop the query_logs table
        cursor.execute("DROP TABLE IF EXISTS query_logs;")
        logging.info("Dropped query_logs table")  # was print(); module uses logging

        # Recreate the query_logs table
        cursor.execute('''CREATE TABLE query_logs (
                            id INTEGER PRIMARY KEY AUTOINCREMENT,
                            natural_language_query TEXT,
                            reformulated_query TEXT,
                            generated_sql TEXT,
                            created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
                        );''')
        logging.info("Recreated the query_logs table.")

        conn.commit()
        cursor.close()
    finally:
        conn.close()  # FIX: release the file handle even if a DDL statement fails
|
| 243 |
+
|
| 244 |
+
# Uncomment the following line to reset the SQLite database when you run this script
|
| 245 |
+
# reset_sqlite_db()
|
db_logging.py
ADDED
|
@@ -0,0 +1,161 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
import sqlite3
|
| 3 |
+
import csv
|
| 4 |
+
from dotenv import load_dotenv
|
| 5 |
+
|
| 6 |
+
# Load environment variables from .env file
load_dotenv()

# SQLite DB path from .env for saving results
# NOTE(review): this module reads the lowercase "db_path" variable while
# db.py and db_setup.py read uppercase "DB_PATH" — confirm the .env defines
# the expected spelling, otherwise the modules may log to different files.
db_path = os.getenv("db_path", "./query_logs.db")
|
| 11 |
+
|
| 12 |
+
# Function to initialize the SQLite database
|
| 13 |
+
def initialize_local_db():
    """Ensure query_logs has the reformulated_query column, migrating if needed.

    When the column is missing, the table is rebuilt via the standard SQLite
    create-new / copy / drop-old / rename sequence so existing rows survive
    (reformulated_query is left NULL for migrated rows).

    NOTE(review): the copy step assumes query_logs already exists; on a fresh
    database the INSERT...SELECT below fails — confirm another initializer
    creates the table first.
    """
    conn = sqlite3.connect(db_path)
    try:
        cursor = conn.cursor()

        # Inspect the current column set.
        cursor.execute("PRAGMA table_info(query_logs);")
        columns = [column[1] for column in cursor.fetchall()]

        # If the reformulated_query column doesn't exist, rebuild the table.
        if 'reformulated_query' not in columns:
            print("Altering table to add reformulated_query column...")
            cursor.execute('''
                CREATE TABLE IF NOT EXISTS query_logs_new (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    natural_language_query TEXT,
                    reformulated_query TEXT,
                    generated_sql TEXT,
                    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
                );
            ''')

            # Carry existing rows across.
            cursor.execute('''
                INSERT INTO query_logs_new (id, natural_language_query, generated_sql, created_at)
                SELECT id, natural_language_query, generated_sql, created_at FROM query_logs;
            ''')

            # Swap the rebuilt table into place.
            cursor.execute("DROP TABLE query_logs;")
            cursor.execute("ALTER TABLE query_logs_new RENAME TO query_logs;")
            conn.commit()

        cursor.close()
    finally:
        # FIX: close unconditionally — previously the connection was only
        # released on the migration path.
        conn.close()
|
| 49 |
+
|
| 50 |
+
# Function to reset (drop all tables) and recreate the schema
|
| 51 |
+
def reset_sqlite_db():
    """Drop and recreate the query_logs table, destroying all logged queries.

    Maintenance helper — only invoked manually (see the commented call below).
    """
    conn = sqlite3.connect(db_path)
    try:
        cursor = conn.cursor()

        # Drop the query_logs table
        cursor.execute("DROP TABLE IF EXISTS query_logs;")
        print("Dropped query_logs table")

        # Recreate the query_logs table
        cursor.execute('''CREATE TABLE query_logs (
                            id INTEGER PRIMARY KEY AUTOINCREMENT,
                            natural_language_query TEXT,
                            reformulated_query TEXT,
                            generated_sql TEXT,
                            created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
                        );''')
        print("Recreated the query_logs table.")

        conn.commit()
        cursor.close()
    finally:
        conn.close()  # FIX: release the file handle even if a DDL statement fails
|
| 72 |
+
|
| 73 |
+
# Function to save the natural language query, reformulated query, and generated SQL to the local SQLite DB
|
| 74 |
+
def save_query_to_local_db(nl_query, reformulated_query, sql_query):
    """Append one generated query (NL text, reformulation, SQL) to the local log.

    Best-effort: failures are printed, never raised.
    """
    try:
        conn = sqlite3.connect(db_path)
        try:
            cursor = conn.cursor()
            cursor.execute('''
                INSERT INTO query_logs (natural_language_query, reformulated_query, generated_sql)
                VALUES (?, ?, ?);
            ''', (nl_query, reformulated_query, sql_query))
            conn.commit()
            cursor.close()
        finally:
            conn.close()  # FIX: close even when the INSERT fails (was leaked)
    except Exception as e:
        print(f"Error saving query: {e}")
|
| 88 |
+
|
| 89 |
+
# Function to retrieve saved queries from the local SQLite DB with pagination and search
|
| 90 |
+
def get_saved_queries(page=1, per_page=10, search_term=None):
    """Fetch one page of logged queries, optionally filtered by a search term.

    Returns rows of (natural_language_query, reformulated_query,
    generated_sql, created_at), newest first; on failure returns the error
    message as a string.
    """
    try:
        offset = (page - 1) * per_page
        conn = sqlite3.connect(db_path)
        try:
            cursor = conn.cursor()

            query = ("SELECT natural_language_query, reformulated_query, "
                     "generated_sql, created_at FROM query_logs")
            params = []
            if search_term:
                # FIX: parameterized LIKE — the previous f-string version was
                # SQL-injectable through search_term.
                query += " WHERE natural_language_query LIKE ? OR generated_sql LIKE ?"
                pattern = f"%{search_term}%"
                params += [pattern, pattern]
            query += " ORDER BY created_at DESC LIMIT ? OFFSET ?;"
            params += [per_page, offset]

            cursor.execute(query, params)
            rows = cursor.fetchall()
            cursor.close()
            return rows
        finally:
            conn.close()  # FIX: close even when the SELECT raises (was leaked)
    except Exception as e:
        return str(e)
|
| 110 |
+
|
| 111 |
+
# Manually clear all data (for testing purposes)
|
| 112 |
+
def clear_data():
    """Delete every row from query_logs (testing helper); the table itself is kept."""
    conn = sqlite3.connect(db_path)
    try:
        cursor = conn.cursor()
        cursor.execute("DELETE FROM query_logs")
        conn.commit()
        cursor.close()
    finally:
        conn.close()  # FIX: close even when the DELETE fails
|
| 119 |
+
|
| 120 |
+
# Function to retrieve the last 50 saved queries from the local SQLite DB
|
| 121 |
+
def get_last_50_saved_queries():
    """Return the 50 most recent query_logs rows, newest first.

    Rows are (natural_language_query, reformulated_query, generated_sql,
    created_at); on failure the error message is returned as a string.
    """
    try:
        conn = sqlite3.connect(db_path)
        try:
            cursor = conn.cursor()
            cursor.execute(
                "SELECT natural_language_query, reformulated_query, generated_sql, created_at "
                "FROM query_logs ORDER BY created_at DESC LIMIT 50;"
            )
            rows = cursor.fetchall()
            cursor.close()
            return rows
        finally:
            conn.close()  # FIX: close even when the SELECT raises (was leaked)
    except Exception as e:
        return str(e)
|
| 136 |
+
|
| 137 |
+
# Function to export all saved queries to a CSV file
|
| 138 |
+
def export_saved_queries_to_csv(file_path="./saved_queries.csv"):
    """Dump every logged query to a CSV file and return its path.

    On failure returns the error message as a string instead of a path.
    """
    try:
        conn = sqlite3.connect(db_path)
        try:
            cursor = conn.cursor()
            # Fetch all saved queries (no LIMIT), newest first.
            cursor.execute("SELECT natural_language_query, reformulated_query, generated_sql, created_at FROM query_logs ORDER BY created_at DESC;")
            rows = cursor.fetchall()
            cursor.close()
        finally:
            conn.close()  # FIX: close even when the SELECT raises (was leaked)

        # FIX: explicit utf-8 keeps non-ASCII query text portable across platforms.
        with open(file_path, 'w', newline='', encoding='utf-8') as csvfile:
            csv_writer = csv.writer(csvfile)
            csv_writer.writerow(['Natural Language Query', 'Reformulated Query', 'Generated SQL', 'Timestamp'])
            csv_writer.writerows(rows)

        return file_path
    except Exception as e:
        return str(e)
|
| 159 |
+
|
| 160 |
+
# Uncomment the following line to reset the SQLite database when you run this script
|
| 161 |
+
# reset_sqlite_db()
|
db_setup.py
ADDED
|
@@ -0,0 +1,31 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
from dotenv import load_dotenv
|
| 3 |
+
import sqlite3
|
| 4 |
+
|
| 5 |
+
# Load environment variables
load_dotenv()

# Get database path from environment variable
db_path = os.getenv("DB_PATH", "./query_logs.db")  # Default to './query_logs.db' if not set
|
| 10 |
+
|
| 11 |
+
# Function to initialize SQLite database
|
| 12 |
+
def initialize_local_db():
    """Create the (legacy) query_logs table if absent.

    NOTE(review): this schema lacks the reformulated_query column that
    db.py/db_logging.py expect — confirm which initializer is authoritative.
    """
    conn = sqlite3.connect(db_path)  # Use the DB path from .env
    try:
        cursor = conn.cursor()

        # Create table if it doesn't exist
        cursor.execute('''
            CREATE TABLE IF NOT EXISTS query_logs (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                natural_language_query TEXT,
                generated_sql TEXT,
                created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
            );
        ''')
        conn.commit()
        cursor.close()
    finally:
        conn.close()  # FIX: close even if table creation fails
|
| 29 |
+
|
| 30 |
+
# Initialize the database on startup.
# NOTE(review): runs at import time as a side effect — importing this module
# creates/opens the SQLite file.
initialize_local_db()
|
openai_integration.py
ADDED
|
@@ -0,0 +1,194 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
import openai
|
| 3 |
+
import json
|
| 4 |
+
from dotenv import load_dotenv
|
| 5 |
+
from db_logging import save_query_to_local_db
|
| 6 |
+
import logging
|
| 7 |
+
|
| 8 |
+
GPT_MODEL = "gpt-4o-mini"
|
| 9 |
+
|
| 10 |
+
# Load environment variables from .env file
|
| 11 |
+
load_dotenv()
|
| 12 |
+
|
| 13 |
+
# Configure logging
|
| 14 |
+
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
|
| 15 |
+
|
| 16 |
+
# Initialize the OpenAI client using API key from .env
|
| 17 |
+
client = openai.OpenAI(api_key=os.getenv("OPENAI_API_KEY"))
|
| 18 |
+
|
| 19 |
+
# Function to load schema from schema.json
|
| 20 |
+
def load_schema_from_json(path="schema.json"):
    """Load the database schema description from a JSON file.

    Args:
        path: Path to the schema file. Defaults to ``"schema.json"`` in
            the working directory, matching the original behaviour.

    Returns:
        The parsed schema dict on success, or ``{"error": "<message>"}``
        on any failure (callers must check for the "error" key).
    """
    try:
        # Explicit encoding so the schema parses identically on every OS.
        with open(path, "r", encoding="utf-8") as schema_file:
            schema_info = json.load(schema_file)
        logging.info("Schema loaded from %s", path)
        return schema_info
    except Exception as e:
        logging.error(f"Error loading schema from {path}: {e}")
        return {"error": str(e)}
|
| 29 |
+
|
| 30 |
+
# Function to build the schema description with foreign keys
|
| 31 |
+
def build_schema_description(schema_info):
    """Render the schema dict as plain text for inclusion in LLM prompts.

    Each table contributes a header line, one line per column, and an
    optional foreign-key section when the table declares any.
    """
    parts = []

    for table, details in schema_info.items():
        parts.append(f"Table {table} (Comment: {details['comment']}):\n")
        for col in details["columns"]:
            parts.append(f" - {col['name']} ({col['data_type']}) (Comment: {col['comment']})\n")
        fks = details.get("foreign_keys")
        if fks:
            parts.append(" Foreign Keys:\n")
            for fk in fks:
                ref = fk["references"]
                parts.append(f" - {fk['column']} references {ref['table']}({ref['column']})\n")

    return "".join(parts)
|
| 43 |
+
|
| 44 |
+
# Function to reformulate the natural language query using OpenAI API
|
| 45 |
+
def reformulate_query(nl_query, schema_description):
    """Rewrite a free-form user question into a precise natural-language query.

    Sends a few-shot prompt to the chat-completions API so that the later
    SQL-generation step receives an unambiguous request.

    Args:
        nl_query: Raw natural-language question from the user.
        schema_description: Human-readable schema text (see
            build_schema_description).

    Returns:
        The reformulated query string on success, or the exception message
        string on failure — callers detect errors by substring matching.
    """
    try:
        # Adding examples to guide the reformulation process
        # (few-shot pairs: vague user phrasing -> precise reformulation).
        examples = """
        Example 1:
        Input: "I'm trying to figure out which agents are the busiest. Can you show me like the top few agents who have a lot on their plate?"
        Reformulated: "Show me the top 5 agents with the highest workload, including their names, emails, and specializations."

        Example 2:
        Input: "I need to get an overview of our classes. Could you pull up a list that shows what each class is about and which client it's for?"
        Reformulated: "List all classes with their subjects, client names, and the number of learners in each class, sorted by the number of learners in descending order."
        """

        prompt = (
            f"Database Schema:\n{schema_description}\n\n"
            f"Using the database schema, reformulate the following natural language query to be more precise and clear for generating an SQL query:\n{nl_query}\n\n"
            f"Here are some examples of how to reformulate the natural language query:\n{examples}\n\n"
            f"Output only the natural language query string using the provided schema, don't prefix with anything and don't use markdown"
        )

        logging.info("Sending reformulation request to OpenAI...")
        # Module-level `client` and GPT_MODEL are configured at import time.
        response = client.chat.completions.create(
            model=GPT_MODEL,
            messages=[
                {"role": "system", "content": "You are an assistant that helps reformulate natural language queries and improve on them."},
                {"role": "user", "content": prompt}
            ],
            max_tokens=300,
            temperature=0.7
        )

        # Take the first (only) completion choice.
        reformulated_query = response.choices[0].message.content.strip()

        logging.info(f"Reformulated Query: {reformulated_query}")
        return reformulated_query
    except Exception as e:
        # Error contract: return the message as a string rather than raise;
        # generate_sql() screens results with `"Error" in ...`.
        logging.error(f"Error reformulating query: {e}")
        return str(e)
|
| 83 |
+
|
| 84 |
+
# Function to generate SQL from reformulated query using OpenAI API
|
| 85 |
+
def generate_sql_from_reformulated(reformulated_query, schema_description):
    """Convert a reformulated natural-language query into raw SQL text.

    Args:
        reformulated_query: Precise NL query produced by reformulate_query().
        schema_description: Human-readable schema text (see
            build_schema_description).

    Returns:
        The generated SQL string on success, or the exception message
        string on failure — callers detect errors by substring matching.
        The SQL is NOT validated or executed here.
    """
    try:
        # Adding examples to guide the SQL generation process
        # (few-shot pairs: NL request -> exact SQL against this schema).
        examples = """
        Example 1:
        Input:
        "Show me the top 5 agents with the highest workload, including their names, emails, and specializations."
        SQL:
        SELECT name, email, specialization, current_workload
        FROM agents
        ORDER BY current_workload DESC
        LIMIT 5;

        Example 2:
        Input:
        "List all classes with their subjects, client names, and the number of learners in each class, sorted by the number of learners in descending order."
        SQL:
        SELECT c.id AS class_id, c.subject, cl.name AS client_name, COUNT(l.id) AS learner_count
        FROM classes c
        JOIN clients cl ON c.client_id = cl.id
        LEFT JOIN learners l ON c.id = l.class_id
        GROUP BY c.id, c.subject, cl.name
        ORDER BY learner_count DESC;

        Example 3:
        Input:
        "List all agents' work history, including the class they worked on, their role, and the start and end dates of each task."
        SQL:
        SELECT a.name AS agent_name, awh.class_id, awh.role, awh.start_date, awh.end_date
        FROM agents a
        JOIN agent_work_history awh ON a.id = awh.agent_id
        ORDER BY awh.start_date DESC;

        Example 4:
        Input:
        "Get a list of all classes with their subjects, the number of learners, and the progression status of each learner in the class."
        SQL:
        SELECT c.id AS class_id, c.subject, COUNT(l.id) AS learner_count, p.progression_level
        FROM classes c
        LEFT JOIN learners l ON c.id = l.class_id
        LEFT JOIN progressions p ON l.id = p.learner_id
        GROUP BY c.id, c.subject, p.progression_level
        ORDER BY learner_count DESC;
        """

        prompt = (
            f"Database Schema:\n{schema_description}\n\n"
            f"Natural language query:\n{reformulated_query}\n\n"
            f"Examples:\n{examples}\n\n"
            f"Convert the natural language query into an SQL query that matches the schema.\n\n"
            f"Use the Examples as a guide on what I expect you to do.\n\n"
            f"Only return the RAW SQL that can be directly executed, don't prefix it with sql and don't end the sql with Quotation marks etc"
        )

        logging.info("Sending SQL generation request to OpenAI...")
        # NOTE(review): the system-prompt typo "databse shema" is runtime
        # text sent to the model and is preserved byte-for-byte here.
        response = client.chat.completions.create(
            model=GPT_MODEL,
            messages=[
                {"role": "system", "content": "You are an assistant that converts natural language to SQL using a databse shema."},
                {"role": "user", "content": prompt}
            ],
            max_tokens=300,
            temperature=0.7
        )

        # Take the first (only) completion choice.
        sql_query = response.choices[0].message.content.strip()

        logging.info(f"SQL Query generated: {sql_query}")
        return sql_query
    except Exception as e:
        # Error contract: return the message as a string rather than raise;
        # generate_sql() screens results with `"Error" in ...`.
        logging.error(f"Error generating SQL: {e}")
        return str(e)
|
| 157 |
+
|
| 158 |
+
# Main function to reformulate the query first and then generate SQL
|
| 159 |
+
def generate_sql(nl_query):
    """End-to-end pipeline: NL question -> reformulated query -> SQL.

    Loads the schema, asks the model to reformulate the question, generates
    SQL from the reformulation, and logs the round-trip locally.

    Args:
        nl_query: Raw natural-language question from the user.

    Returns:
        Tuple ``(reformulated_query, sql_query)``. On failure the first
        element carries an error message and the second may be empty.
    """
    try:
        # Load the schema from schema.json
        schema_info = load_schema_from_json()

        # load_schema_from_json() signals failure with {"error": ...}.
        # The original checked for an "Error..." *string*, which could
        # never match, so schema-load failures slipped through.
        if not isinstance(schema_info, dict) or "error" in schema_info:
            return "Error fetching schema", ""

        # Build the schema description once and reuse it for both calls.
        schema_description = build_schema_description(schema_info)
        # Informational dump — the original logged this at ERROR level.
        logging.info(f"Built schema description: {schema_description}")

        # Reformulate the query; helpers return error text as strings.
        reformulated_query = reformulate_query(nl_query, schema_description)
        if "Error" in reformulated_query:
            return reformulated_query, ""

        # Generate SQL based on the reformulated query.
        sql_query = generate_sql_from_reformulated(reformulated_query, schema_description)
        if "Error" in sql_query:
            return reformulated_query, sql_query

        # Persist the full round-trip for auditing / CSV export.
        save_query_to_local_db(nl_query, reformulated_query, sql_query)

        return reformulated_query, sql_query

    # openai>=1.0 (requirements pin 1.45.1) exposes OpenAIError at the top
    # level; the openai.error module was removed, so the original
    # `except openai.error.OpenAIError` would itself raise AttributeError
    # the moment an exception reached it.
    except openai.OpenAIError as e:
        logging.error(f"OpenAI API error: {e}")
        return "Error during interaction with OpenAI API.", ""

    except Exception as e:
        logging.error(f"General error during SQL process: {e}")
        return "General error during SQL processing.", ""
|
requirements.txt
ADDED
|
@@ -0,0 +1,65 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
aiofiles==23.2.1
|
| 2 |
+
annotated-types==0.7.0
|
| 3 |
+
anyio==4.4.0
|
| 4 |
+
asarPy==1.0.1
|
| 5 |
+
certifi==2024.8.30
|
| 6 |
+
charset-normalizer==3.3.2
|
| 7 |
+
click==8.1.7
|
| 8 |
+
contourpy==1.3.0
|
| 9 |
+
cycler==0.12.1
|
| 10 |
+
distro==1.9.0
|
| 11 |
+
fastapi==0.114.2
|
| 12 |
+
ffmpy==0.4.0
|
| 13 |
+
filelock==3.16.0
|
| 14 |
+
fonttools==4.53.1
|
| 15 |
+
fsspec==2024.9.0
|
| 16 |
+
gradio==4.44.0
|
| 17 |
+
gradio_client==1.3.0
|
| 18 |
+
h11==0.14.0
|
| 19 |
+
httpcore==1.0.5
|
| 20 |
+
httpx==0.27.2
|
| 21 |
+
huggingface-hub==0.24.7
|
| 22 |
+
idna==3.10
|
| 23 |
+
importlib_resources==6.4.5
|
| 24 |
+
Jinja2==3.1.4
|
| 25 |
+
jiter==0.5.0
|
| 26 |
+
kiwisolver==1.4.7
|
| 27 |
+
markdown-it-py==3.0.0
|
| 28 |
+
MarkupSafe==2.1.5
|
| 29 |
+
matplotlib==3.9.2
|
| 30 |
+
mdurl==0.1.2
|
| 31 |
+
numpy==2.1.1
|
| 32 |
+
openai==1.45.1
|
| 33 |
+
orjson==3.10.7
|
| 34 |
+
packaging==24.1
|
| 35 |
+
pandas==2.2.2
|
| 36 |
+
pillow==10.4.0
|
| 37 |
+
psycopg2-binary==2.9.9
|
| 38 |
+
pycairo==1.27.0
|
| 39 |
+
pydantic==2.9.1
|
| 40 |
+
pydantic_core==2.23.3
|
| 41 |
+
pydub==0.25.1
|
| 42 |
+
Pygments==2.18.0
|
| 43 |
+
PyGObject==3.50.0
|
| 44 |
+
pyparsing==3.1.4
|
| 45 |
+
python-dateutil==2.9.0.post0
|
| 46 |
+
python-dotenv==1.0.1
|
| 47 |
+
python-multipart==0.0.9
|
| 48 |
+
pytz==2024.2
|
| 49 |
+
PyYAML==6.0.2
|
| 50 |
+
requests==2.32.3
|
| 51 |
+
rich==13.8.1
|
| 52 |
+
ruff==0.6.5
|
| 53 |
+
semantic-version==2.10.0
|
| 54 |
+
shellingham==1.5.4
|
| 55 |
+
six==1.16.0
|
| 56 |
+
sniffio==1.3.1
|
| 57 |
+
starlette==0.38.5
|
| 58 |
+
tomlkit==0.12.0
|
| 59 |
+
tqdm==4.66.5
|
| 60 |
+
typer==0.12.5
|
| 61 |
+
typing_extensions==4.12.2
|
| 62 |
+
tzdata==2024.1
|
| 63 |
+
urllib3==2.2.3
|
| 64 |
+
uvicorn==0.30.6
|
| 65 |
+
websockets==12.0
|
schema.json
ADDED
|
@@ -0,0 +1,802 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"agent_assignments": {
|
| 3 |
+
"comment": null,
|
| 4 |
+
"columns": [
|
| 5 |
+
{
|
| 6 |
+
"name": "updated_at",
|
| 7 |
+
"data_type": "timestamp without time zone",
|
| 8 |
+
"comment": null
|
| 9 |
+
},
|
| 10 |
+
{
|
| 11 |
+
"name": "agent_id",
|
| 12 |
+
"data_type": "integer",
|
| 13 |
+
"comment": null
|
| 14 |
+
},
|
| 15 |
+
{
|
| 16 |
+
"name": "class_id",
|
| 17 |
+
"data_type": "integer",
|
| 18 |
+
"comment": null
|
| 19 |
+
},
|
| 20 |
+
{
|
| 21 |
+
"name": "task_id",
|
| 22 |
+
"data_type": "integer",
|
| 23 |
+
"comment": null
|
| 24 |
+
},
|
| 25 |
+
{
|
| 26 |
+
"name": "assignment_date",
|
| 27 |
+
"data_type": "date",
|
| 28 |
+
"comment": null
|
| 29 |
+
},
|
| 30 |
+
{
|
| 31 |
+
"name": "reassigned_agent_id",
|
| 32 |
+
"data_type": "integer",
|
| 33 |
+
"comment": null
|
| 34 |
+
},
|
| 35 |
+
{
|
| 36 |
+
"name": "reassignment_date",
|
| 37 |
+
"data_type": "date",
|
| 38 |
+
"comment": null
|
| 39 |
+
},
|
| 40 |
+
{
|
| 41 |
+
"name": "created_at",
|
| 42 |
+
"data_type": "timestamp without time zone",
|
| 43 |
+
"comment": null
|
| 44 |
+
},
|
| 45 |
+
{
|
| 46 |
+
"name": "id",
|
| 47 |
+
"data_type": "integer",
|
| 48 |
+
"comment": null
|
| 49 |
+
},
|
| 50 |
+
{
|
| 51 |
+
"name": "reassignment_reason",
|
| 52 |
+
"data_type": "text",
|
| 53 |
+
"comment": null
|
| 54 |
+
},
|
| 55 |
+
{
|
| 56 |
+
"name": "status",
|
| 57 |
+
"data_type": "character varying",
|
| 58 |
+
"comment": null
|
| 59 |
+
}
|
| 60 |
+
],
|
| 61 |
+
"foreign_keys": [
|
| 62 |
+
{
|
| 63 |
+
"column": "agent_id",
|
| 64 |
+
"references": {
|
| 65 |
+
"table": "agents",
|
| 66 |
+
"column": "id"
|
| 67 |
+
}
|
| 68 |
+
},
|
| 69 |
+
{
|
| 70 |
+
"column": "class_id",
|
| 71 |
+
"references": {
|
| 72 |
+
"table": "classes",
|
| 73 |
+
"column": "id"
|
| 74 |
+
}
|
| 75 |
+
},
|
| 76 |
+
{
|
| 77 |
+
"column": "task_id",
|
| 78 |
+
"references": {
|
| 79 |
+
"table": "tasks",
|
| 80 |
+
"column": "id"
|
| 81 |
+
}
|
| 82 |
+
}
|
| 83 |
+
]
|
| 84 |
+
},
|
| 85 |
+
"agent_availability": {
|
| 86 |
+
"comment": null,
|
| 87 |
+
"columns": [
|
| 88 |
+
{
|
| 89 |
+
"name": "created_at",
|
| 90 |
+
"data_type": "timestamp without time zone",
|
| 91 |
+
"comment": null
|
| 92 |
+
},
|
| 93 |
+
{
|
| 94 |
+
"name": "updated_at",
|
| 95 |
+
"data_type": "timestamp without time zone",
|
| 96 |
+
"comment": null
|
| 97 |
+
},
|
| 98 |
+
{
|
| 99 |
+
"name": "available_date",
|
| 100 |
+
"data_type": "date",
|
| 101 |
+
"comment": null
|
| 102 |
+
},
|
| 103 |
+
{
|
| 104 |
+
"name": "id",
|
| 105 |
+
"data_type": "integer",
|
| 106 |
+
"comment": null
|
| 107 |
+
},
|
| 108 |
+
{
|
| 109 |
+
"name": "agent_id",
|
| 110 |
+
"data_type": "integer",
|
| 111 |
+
"comment": null
|
| 112 |
+
},
|
| 113 |
+
{
|
| 114 |
+
"name": "availability_status",
|
| 115 |
+
"data_type": "character varying",
|
| 116 |
+
"comment": null
|
| 117 |
+
},
|
| 118 |
+
{
|
| 119 |
+
"name": "reason",
|
| 120 |
+
"data_type": "text",
|
| 121 |
+
"comment": null
|
| 122 |
+
}
|
| 123 |
+
],
|
| 124 |
+
"foreign_keys": [
|
| 125 |
+
{
|
| 126 |
+
"column": "agent_id",
|
| 127 |
+
"references": {
|
| 128 |
+
"table": "agents",
|
| 129 |
+
"column": "id"
|
| 130 |
+
}
|
| 131 |
+
}
|
| 132 |
+
]
|
| 133 |
+
},
|
| 134 |
+
"agent_work_history": {
|
| 135 |
+
"comment": null,
|
| 136 |
+
"columns": [
|
| 137 |
+
{
|
| 138 |
+
"name": "updated_at",
|
| 139 |
+
"data_type": "timestamp without time zone",
|
| 140 |
+
"comment": null
|
| 141 |
+
},
|
| 142 |
+
{
|
| 143 |
+
"name": "agent_id",
|
| 144 |
+
"data_type": "integer",
|
| 145 |
+
"comment": null
|
| 146 |
+
},
|
| 147 |
+
{
|
| 148 |
+
"name": "class_id",
|
| 149 |
+
"data_type": "integer",
|
| 150 |
+
"comment": null
|
| 151 |
+
},
|
| 152 |
+
{
|
| 153 |
+
"name": "task_id",
|
| 154 |
+
"data_type": "integer",
|
| 155 |
+
"comment": null
|
| 156 |
+
},
|
| 157 |
+
{
|
| 158 |
+
"name": "start_date",
|
| 159 |
+
"data_type": "date",
|
| 160 |
+
"comment": null
|
| 161 |
+
},
|
| 162 |
+
{
|
| 163 |
+
"name": "end_date",
|
| 164 |
+
"data_type": "date",
|
| 165 |
+
"comment": null
|
| 166 |
+
},
|
| 167 |
+
{
|
| 168 |
+
"name": "created_at",
|
| 169 |
+
"data_type": "timestamp without time zone",
|
| 170 |
+
"comment": null
|
| 171 |
+
},
|
| 172 |
+
{
|
| 173 |
+
"name": "id",
|
| 174 |
+
"data_type": "integer",
|
| 175 |
+
"comment": null
|
| 176 |
+
},
|
| 177 |
+
{
|
| 178 |
+
"name": "reassignment_id",
|
| 179 |
+
"data_type": "integer",
|
| 180 |
+
"comment": null
|
| 181 |
+
},
|
| 182 |
+
{
|
| 183 |
+
"name": "role",
|
| 184 |
+
"data_type": "character varying",
|
| 185 |
+
"comment": null
|
| 186 |
+
},
|
| 187 |
+
{
|
| 188 |
+
"name": "performance_notes",
|
| 189 |
+
"data_type": "text",
|
| 190 |
+
"comment": null
|
| 191 |
+
}
|
| 192 |
+
],
|
| 193 |
+
"foreign_keys": [
|
| 194 |
+
{
|
| 195 |
+
"column": "agent_id",
|
| 196 |
+
"references": {
|
| 197 |
+
"table": "agents",
|
| 198 |
+
"column": "id"
|
| 199 |
+
}
|
| 200 |
+
},
|
| 201 |
+
{
|
| 202 |
+
"column": "class_id",
|
| 203 |
+
"references": {
|
| 204 |
+
"table": "classes",
|
| 205 |
+
"column": "id"
|
| 206 |
+
}
|
| 207 |
+
},
|
| 208 |
+
{
|
| 209 |
+
"column": "task_id",
|
| 210 |
+
"references": {
|
| 211 |
+
"table": "tasks",
|
| 212 |
+
"column": "id"
|
| 213 |
+
}
|
| 214 |
+
}
|
| 215 |
+
]
|
| 216 |
+
},
|
| 217 |
+
"agents": {
|
| 218 |
+
"comment": null,
|
| 219 |
+
"columns": [
|
| 220 |
+
{
|
| 221 |
+
"name": "updated_at",
|
| 222 |
+
"data_type": "timestamp without time zone",
|
| 223 |
+
"comment": null
|
| 224 |
+
},
|
| 225 |
+
{
|
| 226 |
+
"name": "experience",
|
| 227 |
+
"data_type": "integer",
|
| 228 |
+
"comment": null
|
| 229 |
+
},
|
| 230 |
+
{
|
| 231 |
+
"name": "current_workload",
|
| 232 |
+
"data_type": "integer",
|
| 233 |
+
"comment": null
|
| 234 |
+
},
|
| 235 |
+
{
|
| 236 |
+
"name": "created_at",
|
| 237 |
+
"data_type": "timestamp without time zone",
|
| 238 |
+
"comment": null
|
| 239 |
+
},
|
| 240 |
+
{
|
| 241 |
+
"name": "id",
|
| 242 |
+
"data_type": "integer",
|
| 243 |
+
"comment": null
|
| 244 |
+
},
|
| 245 |
+
{
|
| 246 |
+
"name": "location",
|
| 247 |
+
"data_type": "character varying",
|
| 248 |
+
"comment": null
|
| 249 |
+
},
|
| 250 |
+
{
|
| 251 |
+
"name": "status",
|
| 252 |
+
"data_type": "character varying",
|
| 253 |
+
"comment": null
|
| 254 |
+
},
|
| 255 |
+
{
|
| 256 |
+
"name": "name",
|
| 257 |
+
"data_type": "character varying",
|
| 258 |
+
"comment": null
|
| 259 |
+
},
|
| 260 |
+
{
|
| 261 |
+
"name": "email",
|
| 262 |
+
"data_type": "character varying",
|
| 263 |
+
"comment": null
|
| 264 |
+
},
|
| 265 |
+
{
|
| 266 |
+
"name": "phone",
|
| 267 |
+
"data_type": "character varying",
|
| 268 |
+
"comment": null
|
| 269 |
+
},
|
| 270 |
+
{
|
| 271 |
+
"name": "specialization",
|
| 272 |
+
"data_type": "character varying",
|
| 273 |
+
"comment": null
|
| 274 |
+
}
|
| 275 |
+
],
|
| 276 |
+
"foreign_keys": []
|
| 277 |
+
},
|
| 278 |
+
"assessments": {
|
| 279 |
+
"comment": null,
|
| 280 |
+
"columns": [
|
| 281 |
+
{
|
| 282 |
+
"name": "id",
|
| 283 |
+
"data_type": "integer",
|
| 284 |
+
"comment": null
|
| 285 |
+
},
|
| 286 |
+
{
|
| 287 |
+
"name": "class_id",
|
| 288 |
+
"data_type": "integer",
|
| 289 |
+
"comment": null
|
| 290 |
+
},
|
| 291 |
+
{
|
| 292 |
+
"name": "assessment_date",
|
| 293 |
+
"data_type": "date",
|
| 294 |
+
"comment": null
|
| 295 |
+
},
|
| 296 |
+
{
|
| 297 |
+
"name": "created_at",
|
| 298 |
+
"data_type": "timestamp without time zone",
|
| 299 |
+
"comment": null
|
| 300 |
+
},
|
| 301 |
+
{
|
| 302 |
+
"name": "updated_at",
|
| 303 |
+
"data_type": "timestamp without time zone",
|
| 304 |
+
"comment": null
|
| 305 |
+
},
|
| 306 |
+
{
|
| 307 |
+
"name": "assessment_type",
|
| 308 |
+
"data_type": "character varying",
|
| 309 |
+
"comment": null
|
| 310 |
+
},
|
| 311 |
+
{
|
| 312 |
+
"name": "assessor_name",
|
| 313 |
+
"data_type": "character varying",
|
| 314 |
+
"comment": null
|
| 315 |
+
},
|
| 316 |
+
{
|
| 317 |
+
"name": "result",
|
| 318 |
+
"data_type": "character varying",
|
| 319 |
+
"comment": null
|
| 320 |
+
}
|
| 321 |
+
],
|
| 322 |
+
"foreign_keys": [
|
| 323 |
+
{
|
| 324 |
+
"column": "class_id",
|
| 325 |
+
"references": {
|
| 326 |
+
"table": "classes",
|
| 327 |
+
"column": "id"
|
| 328 |
+
}
|
| 329 |
+
}
|
| 330 |
+
]
|
| 331 |
+
},
|
| 332 |
+
"classes": {
|
| 333 |
+
"comment": null,
|
| 334 |
+
"columns": [
|
| 335 |
+
{
|
| 336 |
+
"name": "id",
|
| 337 |
+
"data_type": "integer",
|
| 338 |
+
"comment": null
|
| 339 |
+
},
|
| 340 |
+
{
|
| 341 |
+
"name": "client_id",
|
| 342 |
+
"data_type": "integer",
|
| 343 |
+
"comment": null
|
| 344 |
+
},
|
| 345 |
+
{
|
| 346 |
+
"name": "start_date",
|
| 347 |
+
"data_type": "date",
|
| 348 |
+
"comment": null
|
| 349 |
+
},
|
| 350 |
+
{
|
| 351 |
+
"name": "end_date",
|
| 352 |
+
"data_type": "date",
|
| 353 |
+
"comment": null
|
| 354 |
+
},
|
| 355 |
+
{
|
| 356 |
+
"name": "created_at",
|
| 357 |
+
"data_type": "timestamp without time zone",
|
| 358 |
+
"comment": null
|
| 359 |
+
},
|
| 360 |
+
{
|
| 361 |
+
"name": "updated_at",
|
| 362 |
+
"data_type": "timestamp without time zone",
|
| 363 |
+
"comment": null
|
| 364 |
+
},
|
| 365 |
+
{
|
| 366 |
+
"name": "attendance_status",
|
| 367 |
+
"data_type": "character varying",
|
| 368 |
+
"comment": null
|
| 369 |
+
},
|
| 370 |
+
{
|
| 371 |
+
"name": "progression_status",
|
| 372 |
+
"data_type": "character varying",
|
| 373 |
+
"comment": null
|
| 374 |
+
},
|
| 375 |
+
{
|
| 376 |
+
"name": "subject",
|
| 377 |
+
"data_type": "character varying",
|
| 378 |
+
"comment": null
|
| 379 |
+
},
|
| 380 |
+
{
|
| 381 |
+
"name": "site",
|
| 382 |
+
"data_type": "character varying",
|
| 383 |
+
"comment": null
|
| 384 |
+
},
|
| 385 |
+
{
|
| 386 |
+
"name": "phase",
|
| 387 |
+
"data_type": "character varying",
|
| 388 |
+
"comment": null
|
| 389 |
+
},
|
| 390 |
+
{
|
| 391 |
+
"name": "marketer",
|
| 392 |
+
"data_type": "character varying",
|
| 393 |
+
"comment": null
|
| 394 |
+
},
|
| 395 |
+
{
|
| 396 |
+
"name": "status",
|
| 397 |
+
"data_type": "character varying",
|
| 398 |
+
"comment": null
|
| 399 |
+
}
|
| 400 |
+
],
|
| 401 |
+
"foreign_keys": [
|
| 402 |
+
{
|
| 403 |
+
"column": "client_id",
|
| 404 |
+
"references": {
|
| 405 |
+
"table": "clients",
|
| 406 |
+
"column": "id"
|
| 407 |
+
}
|
| 408 |
+
}
|
| 409 |
+
]
|
| 410 |
+
},
|
| 411 |
+
"clients": {
|
| 412 |
+
"comment": null,
|
| 413 |
+
"columns": [
|
| 414 |
+
{
|
| 415 |
+
"name": "id",
|
| 416 |
+
"data_type": "integer",
|
| 417 |
+
"comment": null
|
| 418 |
+
},
|
| 419 |
+
{
|
| 420 |
+
"name": "created_at",
|
| 421 |
+
"data_type": "timestamp without time zone",
|
| 422 |
+
"comment": null
|
| 423 |
+
},
|
| 424 |
+
{
|
| 425 |
+
"name": "updated_at",
|
| 426 |
+
"data_type": "timestamp without time zone",
|
| 427 |
+
"comment": null
|
| 428 |
+
},
|
| 429 |
+
{
|
| 430 |
+
"name": "email",
|
| 431 |
+
"data_type": "character varying",
|
| 432 |
+
"comment": null
|
| 433 |
+
},
|
| 434 |
+
{
|
| 435 |
+
"name": "phone",
|
| 436 |
+
"data_type": "character varying",
|
| 437 |
+
"comment": null
|
| 438 |
+
},
|
| 439 |
+
{
|
| 440 |
+
"name": "address",
|
| 441 |
+
"data_type": "text",
|
| 442 |
+
"comment": null
|
| 443 |
+
},
|
| 444 |
+
{
|
| 445 |
+
"name": "status",
|
| 446 |
+
"data_type": "character varying",
|
| 447 |
+
"comment": null
|
| 448 |
+
},
|
| 449 |
+
{
|
| 450 |
+
"name": "name",
|
| 451 |
+
"data_type": "character varying",
|
| 452 |
+
"comment": null
|
| 453 |
+
},
|
| 454 |
+
{
|
| 455 |
+
"name": "contact_person",
|
| 456 |
+
"data_type": "character varying",
|
| 457 |
+
"comment": null
|
| 458 |
+
}
|
| 459 |
+
],
|
| 460 |
+
"foreign_keys": []
|
| 461 |
+
},
|
| 462 |
+
"deliveries": {
|
| 463 |
+
"comment": null,
|
| 464 |
+
"columns": [
|
| 465 |
+
{
|
| 466 |
+
"name": "delivery_date",
|
| 467 |
+
"data_type": "date",
|
| 468 |
+
"comment": null
|
| 469 |
+
},
|
| 470 |
+
{
|
| 471 |
+
"name": "class_id",
|
| 472 |
+
"data_type": "integer",
|
| 473 |
+
"comment": null
|
| 474 |
+
},
|
| 475 |
+
{
|
| 476 |
+
"name": "created_at",
|
| 477 |
+
"data_type": "timestamp without time zone",
|
| 478 |
+
"comment": null
|
| 479 |
+
},
|
| 480 |
+
{
|
| 481 |
+
"name": "updated_at",
|
| 482 |
+
"data_type": "timestamp without time zone",
|
| 483 |
+
"comment": null
|
| 484 |
+
},
|
| 485 |
+
{
|
| 486 |
+
"name": "id",
|
| 487 |
+
"data_type": "integer",
|
| 488 |
+
"comment": null
|
| 489 |
+
},
|
| 490 |
+
{
|
| 491 |
+
"name": "delivery_type",
|
| 492 |
+
"data_type": "character varying",
|
| 493 |
+
"comment": null
|
| 494 |
+
},
|
| 495 |
+
{
|
| 496 |
+
"name": "status",
|
| 497 |
+
"data_type": "character varying",
|
| 498 |
+
"comment": null
|
| 499 |
+
}
|
| 500 |
+
],
|
| 501 |
+
"foreign_keys": [
|
| 502 |
+
{
|
| 503 |
+
"column": "class_id",
|
| 504 |
+
"references": {
|
| 505 |
+
"table": "classes",
|
| 506 |
+
"column": "id"
|
| 507 |
+
}
|
| 508 |
+
}
|
| 509 |
+
]
|
| 510 |
+
},
|
| 511 |
+
"events": {
|
| 512 |
+
"comment": null,
|
| 513 |
+
"columns": [
|
| 514 |
+
{
|
| 515 |
+
"name": "id",
|
| 516 |
+
"data_type": "integer",
|
| 517 |
+
"comment": null
|
| 518 |
+
},
|
| 519 |
+
{
|
| 520 |
+
"name": "created_at",
|
| 521 |
+
"data_type": "timestamp without time zone",
|
| 522 |
+
"comment": null
|
| 523 |
+
},
|
| 524 |
+
{
|
| 525 |
+
"name": "updated_at",
|
| 526 |
+
"data_type": "timestamp without time zone",
|
| 527 |
+
"comment": null
|
| 528 |
+
},
|
| 529 |
+
{
|
| 530 |
+
"name": "client_id",
|
| 531 |
+
"data_type": "integer",
|
| 532 |
+
"comment": null
|
| 533 |
+
},
|
| 534 |
+
{
|
| 535 |
+
"name": "class_id",
|
| 536 |
+
"data_type": "integer",
|
| 537 |
+
"comment": null
|
| 538 |
+
},
|
| 539 |
+
{
|
| 540 |
+
"name": "event_date",
|
| 541 |
+
"data_type": "date",
|
| 542 |
+
"comment": null
|
| 543 |
+
},
|
| 544 |
+
{
|
| 545 |
+
"name": "reminder_date",
|
| 546 |
+
"data_type": "date",
|
| 547 |
+
"comment": null
|
| 548 |
+
},
|
| 549 |
+
{
|
| 550 |
+
"name": "name",
|
| 551 |
+
"data_type": "character varying",
|
| 552 |
+
"comment": null
|
| 553 |
+
},
|
| 554 |
+
{
|
| 555 |
+
"name": "event_type",
|
| 556 |
+
"data_type": "character varying",
|
| 557 |
+
"comment": null
|
| 558 |
+
}
|
| 559 |
+
],
|
| 560 |
+
"foreign_keys": [
|
| 561 |
+
{
|
| 562 |
+
"column": "class_id",
|
| 563 |
+
"references": {
|
| 564 |
+
"table": "classes",
|
| 565 |
+
"column": "id"
|
| 566 |
+
}
|
| 567 |
+
},
|
| 568 |
+
{
|
| 569 |
+
"column": "client_id",
|
| 570 |
+
"references": {
|
| 571 |
+
"table": "clients",
|
| 572 |
+
"column": "id"
|
| 573 |
+
}
|
| 574 |
+
}
|
| 575 |
+
]
|
| 576 |
+
},
|
| 577 |
+
"learners": {
|
| 578 |
+
"comment": null,
|
| 579 |
+
"columns": [
|
| 580 |
+
{
|
| 581 |
+
"name": "id",
|
| 582 |
+
"data_type": "integer",
|
| 583 |
+
"comment": null
|
| 584 |
+
},
|
| 585 |
+
{
|
| 586 |
+
"name": "class_id",
|
| 587 |
+
"data_type": "integer",
|
| 588 |
+
"comment": null
|
| 589 |
+
},
|
| 590 |
+
{
|
| 591 |
+
"name": "created_at",
|
| 592 |
+
"data_type": "timestamp without time zone",
|
| 593 |
+
"comment": null
|
| 594 |
+
},
|
| 595 |
+
{
|
| 596 |
+
"name": "updated_at",
|
| 597 |
+
"data_type": "timestamp without time zone",
|
| 598 |
+
"comment": null
|
| 599 |
+
},
|
| 600 |
+
{
|
| 601 |
+
"name": "id_number",
|
| 602 |
+
"data_type": "character varying",
|
| 603 |
+
"comment": null
|
| 604 |
+
},
|
| 605 |
+
{
|
| 606 |
+
"name": "name",
|
| 607 |
+
"data_type": "character varying",
|
| 608 |
+
"comment": null
|
| 609 |
+
},
|
| 610 |
+
{
|
| 611 |
+
"name": "gender",
|
| 612 |
+
"data_type": "character varying",
|
| 613 |
+
"comment": null
|
| 614 |
+
},
|
| 615 |
+
{
|
| 616 |
+
"name": "race",
|
| 617 |
+
"data_type": "character varying",
|
| 618 |
+
"comment": null
|
| 619 |
+
}
|
| 620 |
+
],
|
| 621 |
+
"foreign_keys": [
|
| 622 |
+
{
|
| 623 |
+
"column": "class_id",
|
| 624 |
+
"references": {
|
| 625 |
+
"table": "classes",
|
| 626 |
+
"column": "id"
|
| 627 |
+
}
|
| 628 |
+
}
|
| 629 |
+
]
|
| 630 |
+
},
|
| 631 |
+
"progressions": {
|
| 632 |
+
"comment": null,
|
| 633 |
+
"columns": [
|
| 634 |
+
{
|
| 635 |
+
"name": "updated_at",
|
| 636 |
+
"data_type": "timestamp without time zone",
|
| 637 |
+
"comment": null
|
| 638 |
+
},
|
| 639 |
+
{
|
| 640 |
+
"name": "class_id",
|
| 641 |
+
"data_type": "integer",
|
| 642 |
+
"comment": null
|
| 643 |
+
},
|
| 644 |
+
{
|
| 645 |
+
"name": "learner_id",
|
| 646 |
+
"data_type": "integer",
|
| 647 |
+
"comment": null
|
| 648 |
+
},
|
| 649 |
+
{
|
| 650 |
+
"name": "created_at",
|
| 651 |
+
"data_type": "timestamp without time zone",
|
| 652 |
+
"comment": null
|
| 653 |
+
},
|
| 654 |
+
{
|
| 655 |
+
"name": "id",
|
| 656 |
+
"data_type": "integer",
|
| 657 |
+
"comment": null
|
| 658 |
+
},
|
| 659 |
+
{
|
| 660 |
+
"name": "completion_date",
|
| 661 |
+
"data_type": "date",
|
| 662 |
+
"comment": null
|
| 663 |
+
},
|
| 664 |
+
{
|
| 665 |
+
"name": "progression_level",
|
| 666 |
+
"data_type": "character varying",
|
| 667 |
+
"comment": null
|
| 668 |
+
},
|
| 669 |
+
{
|
| 670 |
+
"name": "status",
|
| 671 |
+
"data_type": "character varying",
|
| 672 |
+
"comment": null
|
| 673 |
+
}
|
| 674 |
+
],
|
| 675 |
+
"foreign_keys": [
|
| 676 |
+
{
|
| 677 |
+
"column": "class_id",
|
| 678 |
+
"references": {
|
| 679 |
+
"table": "classes",
|
| 680 |
+
"column": "id"
|
| 681 |
+
}
|
| 682 |
+
},
|
| 683 |
+
{
|
| 684 |
+
"column": "learner_id",
|
| 685 |
+
"references": {
|
| 686 |
+
"table": "learners",
|
| 687 |
+
"column": "id"
|
| 688 |
+
}
|
| 689 |
+
}
|
| 690 |
+
]
|
| 691 |
+
},
|
| 692 |
+
"reports": {
|
| 693 |
+
"comment": null,
|
| 694 |
+
"columns": [
|
| 695 |
+
{
|
| 696 |
+
"name": "related_client_id",
|
| 697 |
+
"data_type": "integer",
|
| 698 |
+
"comment": null
|
| 699 |
+
},
|
| 700 |
+
{
|
| 701 |
+
"name": "created_at",
|
| 702 |
+
"data_type": "timestamp without time zone",
|
| 703 |
+
"comment": null
|
| 704 |
+
},
|
| 705 |
+
{
|
| 706 |
+
"name": "updated_at",
|
| 707 |
+
"data_type": "timestamp without time zone",
|
| 708 |
+
"comment": null
|
| 709 |
+
},
|
| 710 |
+
{
|
| 711 |
+
"name": "related_class_id",
|
| 712 |
+
"data_type": "integer",
|
| 713 |
+
"comment": null
|
| 714 |
+
},
|
| 715 |
+
{
|
| 716 |
+
"name": "id",
|
| 717 |
+
"data_type": "integer",
|
| 718 |
+
"comment": null
|
| 719 |
+
},
|
| 720 |
+
{
|
| 721 |
+
"name": "report_type",
|
| 722 |
+
"data_type": "character varying",
|
| 723 |
+
"comment": null
|
| 724 |
+
},
|
| 725 |
+
{
|
| 726 |
+
"name": "content",
|
| 727 |
+
"data_type": "text",
|
| 728 |
+
"comment": null
|
| 729 |
+
}
|
| 730 |
+
],
|
| 731 |
+
"foreign_keys": [
|
| 732 |
+
{
|
| 733 |
+
"column": "related_class_id",
|
| 734 |
+
"references": {
|
| 735 |
+
"table": "classes",
|
| 736 |
+
"column": "id"
|
| 737 |
+
}
|
| 738 |
+
},
|
| 739 |
+
{
|
| 740 |
+
"column": "related_client_id",
|
| 741 |
+
"references": {
|
| 742 |
+
"table": "clients",
|
| 743 |
+
"column": "id"
|
| 744 |
+
}
|
| 745 |
+
}
|
| 746 |
+
]
|
| 747 |
+
},
|
| 748 |
+
"tasks": {
|
| 749 |
+
"comment": null,
|
| 750 |
+
"columns": [
|
| 751 |
+
{
|
| 752 |
+
"name": "id",
|
| 753 |
+
"data_type": "integer",
|
| 754 |
+
"comment": null
|
| 755 |
+
},
|
| 756 |
+
{
|
| 757 |
+
"name": "event_id",
|
| 758 |
+
"data_type": "integer",
|
| 759 |
+
"comment": null
|
| 760 |
+
},
|
| 761 |
+
{
|
| 762 |
+
"name": "due_date",
|
| 763 |
+
"data_type": "date",
|
| 764 |
+
"comment": null
|
| 765 |
+
},
|
| 766 |
+
{
|
| 767 |
+
"name": "created_at",
|
| 768 |
+
"data_type": "timestamp without time zone",
|
| 769 |
+
"comment": null
|
| 770 |
+
},
|
| 771 |
+
{
|
| 772 |
+
"name": "updated_at",
|
| 773 |
+
"data_type": "timestamp without time zone",
|
| 774 |
+
"comment": null
|
| 775 |
+
},
|
| 776 |
+
{
|
| 777 |
+
"name": "description",
|
| 778 |
+
"data_type": "text",
|
| 779 |
+
"comment": null
|
| 780 |
+
},
|
| 781 |
+
{
|
| 782 |
+
"name": "assigned_to",
|
| 783 |
+
"data_type": "character varying",
|
| 784 |
+
"comment": null
|
| 785 |
+
},
|
| 786 |
+
{
|
| 787 |
+
"name": "status",
|
| 788 |
+
"data_type": "character varying",
|
| 789 |
+
"comment": null
|
| 790 |
+
}
|
| 791 |
+
],
|
| 792 |
+
"foreign_keys": [
|
| 793 |
+
{
|
| 794 |
+
"column": "event_id",
|
| 795 |
+
"references": {
|
| 796 |
+
"table": "events",
|
| 797 |
+
"column": "id"
|
| 798 |
+
}
|
| 799 |
+
}
|
| 800 |
+
]
|
| 801 |
+
}
|
| 802 |
+
}
|
schema.md
ADDED
|
@@ -0,0 +1,195 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
## Request
|
| 2 |
+
IMPORTANT! Use the "Database Schema" below.
|
| 3 |
+
|
| 4 |
+
Here’s a list describing what I want you to assist me with:
|
| 5 |
+
|
| 6 |
+
1. **Generate Demo Data**: Create demo data for a PostgreSQL database with at least 25 entries per table, considering the relationships between the tables.
|
| 7 |
+
|
| 8 |
+
2. **Focus on South African Context**: The demo data must reflect South African specifics, such as:
|
| 9 |
+
- South African names for agents, clients, and learners.
|
| 10 |
+
- South African locations (cities and towns) for agents, clients, and class sites.
|
| 11 |
+
- South African phone numbers (+27 format).
|
| 12 |
+
- Realistic South African ID numbers.
|
| 13 |
+
|
| 14 |
+
3. **Tables to Include**: Populate demo data for the following 13 tables:
|
| 15 |
+
- **agents**
|
| 16 |
+
- **clients**
|
| 17 |
+
- **classes**
|
| 18 |
+
- **learners**
|
| 19 |
+
- **assessments**
|
| 20 |
+
- **progressions**
|
| 21 |
+
- **tasks**
|
| 22 |
+
- **deliveries**
|
| 23 |
+
- **events**
|
| 24 |
+
- **reports**
|
| 25 |
+
- **agent_assignments**
|
| 26 |
+
- **agent_availability**
|
| 27 |
+
- **agent_work_history**
|
| 28 |
+
|
| 29 |
+
4. **SQL Format**: The data should be returned in SQL `INSERT INTO` statements with multiple rows, following a format like:
|
| 30 |
+
```sql
|
| 31 |
+
INSERT INTO agents (name, email, phone, specialization, experience, current_workload, location, status, created_at, updated_at) VALUES
|
| 32 |
+
('Thabo Mokoena', 'thabo.mokoena@example.co.za', '+27831234567', 'Math Tutor', 5, 3, 'Johannesburg', 'Active', NOW(), NOW()),
|
| 33 |
+
('Naledi Khumalo', 'naledi.khumalo@example.co.za', '+27836543210', 'Science Instructor', 8, 2, 'Pretoria', 'Active', NOW(), NOW());
|
| 34 |
+
```
|
| 35 |
+
|
| 36 |
+
5. **Ensure Data Consistency**: The generated data should respect the foreign key relationships between the tables, ensuring that IDs referenced in one table (e.g., class_id) exist in the appropriate related table.
|
| 37 |
+
|
| 38 |
+
6. **Provide the Data as a Downloadable SQL File**: Finally, package the SQL insertions for all 13 tables into a downloadable `.sql` file.
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
|
| 42 |
+
## Database Schema:
|
| 43 |
+
|
| 44 |
+
Table agent_availability (Comment: None):
|
| 45 |
+
- id (integer) (Comment: None)
|
| 46 |
+
- agent_id (integer) (Comment: None)
|
| 47 |
+
- available_date (date) (Comment: None)
|
| 48 |
+
- availability_status (character varying) (Comment: None)
|
| 49 |
+
- reason (text) (Comment: None)
|
| 50 |
+
- created_at (timestamp without time zone) (Comment: None)
|
| 51 |
+
- updated_at (timestamp without time zone) (Comment: None)
|
| 52 |
+
Table agent_work_history (Comment: None):
|
| 53 |
+
- id (integer) (Comment: None)
|
| 54 |
+
- agent_id (integer) (Comment: None)
|
| 55 |
+
- class_id (integer) (Comment: None)
|
| 56 |
+
- task_id (integer) (Comment: None)
|
| 57 |
+
- start_date (date) (Comment: None)
|
| 58 |
+
- end_date (date) (Comment: None)
|
| 59 |
+
- role (character varying) (Comment: None)
|
| 60 |
+
- performance_notes (text) (Comment: None)
|
| 61 |
+
- reassignment_id (integer) (Comment: None)
|
| 62 |
+
- created_at (timestamp without time zone) (Comment: None)
|
| 63 |
+
- updated_at (timestamp without time zone) (Comment: None)
|
| 64 |
+
Foreign Keys:
|
| 65 |
+
- class_id references classes(id)
|
| 66 |
+
- task_id references tasks(id)
|
| 67 |
+
Table agents (Comment: None):
|
| 68 |
+
- id (integer) (Comment: None)
|
| 69 |
+
- name (character varying) (Comment: None)
|
| 70 |
+
- email (character varying) (Comment: None)
|
| 71 |
+
- phone (character varying) (Comment: None)
|
| 72 |
+
- specialization (character varying) (Comment: None)
|
| 73 |
+
- experience (integer) (Comment: None)
|
| 74 |
+
- current_workload (integer) (Comment: None)
|
| 75 |
+
- location (character varying) (Comment: None)
|
| 76 |
+
- status (character varying) (Comment: None)
|
| 77 |
+
- created_at (timestamp without time zone) (Comment: None)
|
| 78 |
+
- updated_at (timestamp without time zone) (Comment: None)
|
| 79 |
+
- sms (character varying) (Comment: None)
|
| 80 |
+
Table assessments (Comment: None):
|
| 81 |
+
- id (integer) (Comment: None)
|
| 82 |
+
- class_id (integer) (Comment: None)
|
| 83 |
+
- assessment_type (character varying) (Comment: None)
|
| 84 |
+
- assessment_date (date) (Comment: None)
|
| 85 |
+
- result (character varying) (Comment: None)
|
| 86 |
+
- assessor_name (character varying) (Comment: None)
|
| 87 |
+
- created_at (timestamp without time zone) (Comment: None)
|
| 88 |
+
- updated_at (timestamp without time zone) (Comment: None)
|
| 89 |
+
Foreign Keys:
|
| 90 |
+
- class_id references classes(id)
|
| 91 |
+
Table classes (Comment: None):
|
| 92 |
+
- id (integer) (Comment: None)
|
| 93 |
+
- client_id (integer) (Comment: None)
|
| 94 |
+
- site (character varying) (Comment: None)
|
| 95 |
+
- start_date (date) (Comment: None)
|
| 96 |
+
- end_date (date) (Comment: None)
|
| 97 |
+
- status (character varying) (Comment: None)
|
| 98 |
+
- attendance_status (character varying) (Comment: None)
|
| 99 |
+
- progression_status (character varying) (Comment: None)
|
| 100 |
+
- subject (character varying) (Comment: None)
|
| 101 |
+
- phase (character varying) (Comment: None)
|
| 102 |
+
- marketer (character varying) (Comment: None)
|
| 103 |
+
- created_at (timestamp without time zone) (Comment: None)
|
| 104 |
+
- updated_at (timestamp without time zone) (Comment: None)
|
| 105 |
+
Table clients (Comment: None):
|
| 106 |
+
- id (integer) (Comment: None)
|
| 107 |
+
- name (character varying) (Comment: None)
|
| 108 |
+
- contact_person (character varying) (Comment: None)
|
| 109 |
+
- email (character varying) (Comment: None)
|
| 110 |
+
- phone (character varying) (Comment: None)
|
| 111 |
+
- address (text) (Comment: None)
|
| 112 |
+
- status (character varying) (Comment: None)
|
| 113 |
+
- created_at (timestamp without time zone) (Comment: None)
|
| 114 |
+
- updated_at (timestamp without time zone) (Comment: None)
|
| 115 |
+
Table deliveries (Comment: None):
|
| 116 |
+
- id (integer) (Comment: None)
|
| 117 |
+
- class_id (integer) (Comment: None)
|
| 118 |
+
- delivery_type (character varying) (Comment: None)
|
| 119 |
+
- delivery_date (date) (Comment: None)
|
| 120 |
+
- status (character varying) (Comment: None)
|
| 121 |
+
- created_at (timestamp without time zone) (Comment: None)
|
| 122 |
+
- updated_at (timestamp without time zone) (Comment: None)
|
| 123 |
+
Foreign Keys:
|
| 124 |
+
- class_id references classes(id)
|
| 125 |
+
Table events (Comment: None):
|
| 126 |
+
- id (integer) (Comment: None)
|
| 127 |
+
- name (character varying) (Comment: None)
|
| 128 |
+
- client_id (integer) (Comment: None)
|
| 129 |
+
- class_id (integer) (Comment: None)
|
| 130 |
+
- event_type (character varying) (Comment: None)
|
| 131 |
+
- event_date (date) (Comment: None)
|
| 132 |
+
- reminder_date (date) (Comment: None)
|
| 133 |
+
- created_at (timestamp without time zone) (Comment: None)
|
| 134 |
+
- updated_at (timestamp without time zone) (Comment: None)
|
| 135 |
+
Foreign Keys:
|
| 136 |
+
- class_id references classes(id)
|
| 137 |
+
Table learners (Comment: None):
|
| 138 |
+
- id (integer) (Comment: None)
|
| 139 |
+
- name (character varying) (Comment: None)
|
| 140 |
+
- gender (character varying) (Comment: None)
|
| 141 |
+
- race (character varying) (Comment: None)
|
| 142 |
+
- id_number (character varying) (Comment: None)
|
| 143 |
+
- class_id (integer) (Comment: None)
|
| 144 |
+
- created_at (timestamp without time zone) (Comment: None)
|
| 145 |
+
- updated_at (timestamp without time zone) (Comment: None)
|
| 146 |
+
Foreign Keys:
|
| 147 |
+
- class_id references classes(id)
|
| 148 |
+
Table progressions (Comment: None):
|
| 149 |
+
- id (integer) (Comment: None)
|
| 150 |
+
- class_id (integer) (Comment: None)
|
| 151 |
+
- learner_id (integer) (Comment: None)
|
| 152 |
+
- progression_level (character varying) (Comment: None)
|
| 153 |
+
- completion_date (date) (Comment: None)
|
| 154 |
+
- status (character varying) (Comment: None)
|
| 155 |
+
- created_at (timestamp without time zone) (Comment: None)
|
| 156 |
+
- updated_at (timestamp without time zone) (Comment: None)
|
| 157 |
+
Foreign Keys:
|
| 158 |
+
- class_id references classes(id)
|
| 159 |
+
- learner_id references learners(id)
|
| 160 |
+
Table reports (Comment: None):
|
| 161 |
+
- id (integer) (Comment: None)
|
| 162 |
+
- report_type (character varying) (Comment: None)
|
| 163 |
+
- related_class_id (integer) (Comment: None)
|
| 164 |
+
- related_client_id (integer) (Comment: None)
|
| 165 |
+
- content (text) (Comment: None)
|
| 166 |
+
- created_at (timestamp without time zone) (Comment: None)
|
| 167 |
+
- updated_at (timestamp without time zone) (Comment: None)
|
| 168 |
+
Foreign Keys:
|
| 169 |
+
- related_class_id references classes(id)
|
| 170 |
+
Table tasks (Comment: None):
|
| 171 |
+
- id (integer) (Comment: None)
|
| 172 |
+
- event_id (integer) (Comment: None)
|
| 173 |
+
- description (text) (Comment: None)
|
| 174 |
+
- due_date (date) (Comment: None)
|
| 175 |
+
- status (character varying) (Comment: None)
|
| 176 |
+
- assigned_to (character varying) (Comment: None)
|
| 177 |
+
- created_at (timestamp without time zone) (Comment: None)
|
| 178 |
+
- updated_at (timestamp without time zone) (Comment: None)
|
| 179 |
+
Foreign Keys:
|
| 180 |
+
- event_id references events(id)
|
| 181 |
+
Table agent_assignments (Comment: None):
|
| 182 |
+
- id (integer) (Comment: None)
|
| 183 |
+
- agent_id (integer) (Comment: None)
|
| 184 |
+
- class_id (integer) (Comment: None)
|
| 185 |
+
- task_id (integer) (Comment: None)
|
| 186 |
+
- assignment_date (date) (Comment: None)
|
| 187 |
+
- reassigned_agent_id (integer) (Comment: None)
|
| 188 |
+
- reassignment_date (date) (Comment: None)
|
| 189 |
+
- reassignment_reason (text) (Comment: None)
|
| 190 |
+
- status (character varying) (Comment: None)
|
| 191 |
+
- created_at (timestamp without time zone) (Comment: None)
|
| 192 |
+
- updated_at (timestamp without time zone) (Comment: None)
|
| 193 |
+
Foreign Keys:
|
| 194 |
+
- class_id references classes(id)
|
| 195 |
+
- task_id references tasks(id)
|