arthi.kasturirangan@informa.com committed on
Commit
560d5c2
·
1 Parent(s): 487c78f

Initial Push

Browse files
Files changed (46) hide show
  1. .env.sample +6 -0
  2. .gitattributes +3 -0
  3. Dockerfile +23 -0
  4. README.md +4 -3
  5. app/__init__.py +3 -0
  6. app/__pycache__/__init__.cpython-311.pyc +0 -0
  7. app/__pycache__/__init__.cpython-312.pyc +0 -0
  8. app/__pycache__/main.cpython-312.pyc +0 -0
  9. app/agent/.gitignore +176 -0
  10. app/agent/.langgraph_api/.langgraph_checkpoint.1.pckl +0 -0
  11. app/agent/.langgraph_api/.langgraph_checkpoint.2.pckl +0 -0
  12. app/agent/.langgraph_api/.langgraph_checkpoint.3.pckl +0 -0
  13. app/agent/.langgraph_api/.langgraph_ops.pckl +0 -0
  14. app/agent/.langgraph_api/.langgraph_retry_counter.pckl +0 -0
  15. app/agent/.langgraph_api/store.pckl +0 -0
  16. app/agent/.langgraph_api/store.vectors.pckl +0 -0
  17. app/agent/__init__.py +3 -0
  18. app/agent/configuration.py +48 -0
  19. app/agent/graph.py +167 -0
  20. app/agent/langgraph.json +7 -0
  21. app/agent/prompts.py +37 -0
  22. app/agent/state.py +69 -0
  23. app/agent/tools.py +80 -0
  24. app/agent/utils.py +67 -0
  25. app/api/__init__.py +3 -0
  26. app/api/__pycache__/__init__.cpython-312.pyc +0 -0
  27. app/api/__pycache__/insights.cpython-312.pyc +0 -0
  28. app/api/insights.py +268 -0
  29. app/api/query.py +17 -0
  30. app/db/__init__.py +3 -0
  31. app/db/__pycache__/__init__.cpython-312.pyc +0 -0
  32. app/db/__pycache__/database.cpython-312.pyc +0 -0
  33. app/db/__pycache__/models.cpython-312.pyc +0 -0
  34. app/db/database.py +27 -0
  35. app/db/models.py +211 -0
  36. app/main.py +64 -0
  37. app/utils/helpers.py +23 -0
  38. data/README.txt +47 -0
  39. data/sqlite-sakila-db/ReadME.txt +16 -0
  40. data/sqlite-sakila-db/sqlite-sakila-delete-data.sql +45 -0
  41. data/sqlite-sakila-db/sqlite-sakila-drop-objects.sql +68 -0
  42. data/sqlite-sakila-db/sqlite-sakila-insert-data.sql +0 -0
  43. data/sqlite-sakila-db/sqlite-sakila-schema.sql +644 -0
  44. package-lock.json +6 -0
  45. pyproject.toml +25 -0
  46. requirements.txt +11 -0
.env.sample ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
+ LANGCHAIN_TRACING_V2=
2
+ LANGCHAIN_ENDPOINT=
3
+ LANGCHAIN_PROJECT=
4
+ LANGSMITH_API_KEY=
5
+ OPENAI_API_KEY=
6
+
.gitattributes CHANGED
@@ -33,3 +33,6 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
33
  *.zip filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
 
 
 
 
33
  *.zip filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
36
+ data/sqlite-sakila.db filter=lfs diff=lfs merge=lfs -text
37
+ data/SQLite3[[:space:]]Sakila[[:space:]]Sample[[:space:]]Database[[:space:]]ERD.png filter=lfs diff=lfs merge=lfs -text
38
+ data/sqlite-sakila-db/sqlite-sakila.sq filter=lfs diff=lfs merge=lfs -text
Dockerfile ADDED
@@ -0,0 +1,23 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ FROM python:3.12-slim
2
+
3
+ WORKDIR /app
4
+
5
+ # Install system dependencies
6
+ RUN apt-get update && apt-get install -y \
7
+ build-essential \
8
+ && rm -rf /var/lib/apt/lists/*
9
+
10
+ # Copy requirements first to leverage Docker cache
11
+ COPY requirements.txt .
12
+
13
+ # Install Python dependencies
14
+ RUN pip install --no-cache-dir -r requirements.txt
15
+
16
+ # Copy the rest of the application
17
+ COPY . .
18
+
19
+ # Expose the port the app runs on
20
+ EXPOSE 8000
21
+
22
+ # Command to run the application
23
+ CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"]
README.md CHANGED
@@ -1,10 +1,11 @@
1
  ---
2
  title: Text To Sql Agent Api
3
- emoji: 🌍
4
- colorFrom: yellow
5
- colorTo: gray
6
  sdk: docker
7
  pinned: false
 
8
  ---
9
 
10
  Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
 
1
  ---
2
  title: Text To Sql Agent Api
3
+ emoji: 🐨
4
+ colorFrom: indigo
5
+ colorTo: indigo
6
  sdk: docker
7
  pinned: false
8
+ short_description: SQL agent to converse with structured databases
9
  ---
10
 
11
  Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
app/__init__.py ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ """
2
+ InsightCopilot backend application package.
3
+ """
app/__pycache__/__init__.cpython-311.pyc ADDED
Binary file (251 Bytes). View file
 
app/__pycache__/__init__.cpython-312.pyc ADDED
Binary file (179 Bytes). View file
 
app/__pycache__/main.cpython-312.pyc ADDED
Binary file (2.15 kB). View file
 
app/agent/.gitignore ADDED
@@ -0,0 +1,176 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Byte-compiled / optimized / DLL files
2
+ __pycache__/
3
+ *.py[cod]
4
+ *$py.class
5
+
6
+ # C extensions
7
+ *.so
8
+
9
+ # Distribution / packaging
10
+ .Python
11
+ build/
12
+ develop-eggs/
13
+ dist/
14
+ downloads/
15
+ eggs/
16
+ .eggs/
17
+ lib/
18
+ lib64/
19
+ parts/
20
+ sdist/
21
+ var/
22
+ wheels/
23
+ share/python-wheels/
24
+ *.egg-info/
25
+ .installed.cfg
26
+ *.egg
27
+ MANIFEST
28
+
29
+ # PyInstaller
30
+ # Usually these files are written by a python script from a template
31
+ # before PyInstaller builds the exe, so as to inject date/other infos into it.
32
+ *.manifest
33
+ *.spec
34
+
35
+ # Installer logs
36
+ pip-log.txt
37
+ pip-delete-this-directory.txt
38
+
39
+ # Unit test / coverage reports
40
+ htmlcov/
41
+ .tox/
42
+ .nox/
43
+ .coverage
44
+ .coverage.*
45
+ .cache
46
+ nosetests.xml
47
+ coverage.xml
48
+ *.cover
49
+ *.py,cover
50
+ .hypothesis/
51
+ .pytest_cache/
52
+ cover/
53
+
54
+ # Translations
55
+ *.mo
56
+ *.pot
57
+
58
+ # Django stuff:
59
+ *.log
60
+ local_settings.py
61
+ db.sqlite3
62
+ db.sqlite3-journal
63
+
64
+ # Flask stuff:
65
+ instance/
66
+ .webassets-cache
67
+
68
+ # Scrapy stuff:
69
+ .scrapy
70
+
71
+ # Sphinx documentation
72
+ docs/_build/
73
+
74
+ # PyBuilder
75
+ .pybuilder/
76
+ target/
77
+
78
+ # Jupyter Notebook
79
+ .ipynb_checkpoints
80
+
81
+ # IPython
82
+ profile_default/
83
+ ipython_config.py
84
+
85
+ # pyenv
86
+ # For a library or package, you might want to ignore these files since the code is
87
+ # intended to run in multiple environments; otherwise, check them in:
88
+ # .python-version
89
+
90
+ # pipenv
91
+ # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
92
+ # However, in case of collaboration, if having platform-specific dependencies or dependencies
93
+ # having no cross-platform support, pipenv may install dependencies that don't work, or not
94
+ # install all needed dependencies.
95
+ #Pipfile.lock
96
+
97
+ # UV
98
+ # Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control.
99
+ # This is especially recommended for binary packages to ensure reproducibility, and is more
100
+ # commonly ignored for libraries.
101
+ #uv.lock
102
+
103
+ # poetry
104
+ # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
105
+ # This is especially recommended for binary packages to ensure reproducibility, and is more
106
+ # commonly ignored for libraries.
107
+ # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
108
+ #poetry.lock
109
+
110
+ # pdm
111
+ # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
112
+ #pdm.lock
113
+ # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
114
+ # in version control.
115
+ # https://pdm.fming.dev/latest/usage/project/#working-with-version-control
116
+ .pdm.toml
117
+ .pdm-python
118
+ .pdm-build/
119
+
120
+ # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
121
+ __pypackages__/
122
+
123
+ # Celery stuff
124
+ celerybeat-schedule
125
+ celerybeat.pid
126
+
127
+ # SageMath parsed files
128
+ *.sage.py
129
+
130
+ # Environments
131
+ .env
132
+ .venv
133
+ env/
134
+ venv/
135
+ ENV/
136
+ env.bak/
137
+ venv.bak/
138
+
139
+ # Spyder project settings
140
+ .spyderproject
141
+ .spyproject
142
+
143
+ # Rope project settings
144
+ .ropeproject
145
+
146
+ # mkdocs documentation
147
+ /site
148
+
149
+ # mypy
150
+ .mypy_cache/
151
+ .dmypy.json
152
+ dmypy.json
153
+
154
+ # Pyre type checker
155
+ .pyre/
156
+
157
+ # pytype static type analyzer
158
+ .pytype/
159
+
160
+ # Cython debug symbols
161
+ cython_debug/
162
+
163
+ # PyCharm
164
+ # JetBrains specific template is maintained in a separate JetBrains.gitignore that can
165
+ # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
166
+ # and can be added to the global gitignore or merged into this file. For a more nuclear
167
+ # option (not recommended) you can uncomment the following to ignore the entire idea folder.
168
+ #.idea/
169
+
170
+ # Ruff stuff:
171
+ .ruff_cache/
172
+
173
+ # PyPI configuration file
174
+ .pypirc
175
+
176
+ /data
app/agent/.langgraph_api/.langgraph_checkpoint.1.pckl ADDED
Binary file (6 Bytes). View file
 
app/agent/.langgraph_api/.langgraph_checkpoint.2.pckl ADDED
Binary file (6 Bytes). View file
 
app/agent/.langgraph_api/.langgraph_checkpoint.3.pckl ADDED
Binary file (6 Bytes). View file
 
app/agent/.langgraph_api/.langgraph_ops.pckl ADDED
Binary file (102 Bytes). View file
 
app/agent/.langgraph_api/.langgraph_retry_counter.pckl ADDED
Binary file (6 Bytes). View file
 
app/agent/.langgraph_api/store.pckl ADDED
Binary file (6 Bytes). View file
 
app/agent/.langgraph_api/store.vectors.pckl ADDED
Binary file (6 Bytes). View file
 
app/agent/__init__.py ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ """
2
+ LangGraph agent package.
3
+ """
app/agent/configuration.py ADDED
@@ -0,0 +1,48 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Define the configurable parameters for the agent."""
2
+
3
+ from __future__ import annotations
4
+
5
+ from dataclasses import dataclass, field, fields
6
+ from typing import Annotated
7
+
8
+ from app.agent import prompts
9
+ from langchain_core.runnables import ensure_config
10
+ from langgraph.config import get_config
11
+
12
+
@dataclass(kw_only=True)
class Configuration:
    """Configurable parameters for the agent.

    Instances are normally built from the LangGraph runtime config via
    `from_context()` rather than constructed directly.
    """

    # System prompt injected as the first message of every model call.
    system_prompt: str = field(
        default=prompts.SYSTEM_PROMPT,
        metadata={
            "description": "The system prompt to use for the agent's interactions. "
            "This prompt sets the context and behavior for the agent."
        },
    )

    # Fully qualified model name in "provider/model" form (see load_chat_model).
    model: Annotated[str, {"__template_metadata__": {"kind": "llm"}}] = field(
        default="openai/gpt-4o",
        metadata={
            "description": "The name of the language model to use for the agent's main interactions. "
            "Should be in the form: provider/model-name."
        },
    )

    max_search_results: int = field(
        default=10,
        metadata={"description": "The maximum number of search results to return for each search query."},
    )

    @classmethod
    def from_context(cls) -> Configuration:
        """Create a Configuration instance from a RunnableConfig object.

        Falls back to defaults when called outside a LangGraph run
        (`get_config()` raises RuntimeError in that case). Keys in
        `configurable` that do not match an init-able field are ignored.
        """
        try:
            config = get_config()
        except RuntimeError:
            # Not running inside a LangGraph context; use an empty config.
            config = None
        config = ensure_config(config)
        configurable = config.get("configurable") or {}
        # Only keep keys that map onto constructor-visible dataclass fields.
        _fields = {f.name for f in fields(cls) if f.init}
        return cls(**{k: v for k, v in configurable.items() if k in _fields})
app/agent/graph.py ADDED
@@ -0,0 +1,167 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Define a custom Reasoning and Action agent.
2
+
3
+ Works with a chat model with tool calling support.
4
+ """
5
+
6
+ from typing import Dict, List, Literal, cast
7
+
8
+ from app.agent.configuration import Configuration
9
+ from app.agent.state import AgentState, InputState, SQLAgentState
10
+ from app.agent.tools import TOOLS
11
+ from app.agent.utils import load_chat_model
12
+ from dotenv import load_dotenv
13
+ from langchain_core.messages import AIMessage
14
+ from langgraph.checkpoint.memory import MemorySaver
15
+ from langgraph.graph import StateGraph
16
+ from langgraph.prebuilt import ToolNode
17
+
18
+ load_dotenv()
19
+
# Define the function that calls the model
async def call_model(state: AgentState) -> Dict[str, List[AIMessage]]:
    """Invoke the LLM that drives the agent.

    Builds the prompt (system message followed by the conversation so
    far), calls the tool-bound chat model, and returns the reply to be
    merged into the state's message list.

    Args:
        state: The current state of the conversation.

    Returns:
        dict: {"messages": [<AIMessage>]} containing the model's response.
    """
    cfg = Configuration.from_context()

    # Bind the available tools to the configured chat model.
    chat_model = load_chat_model(cfg.model).bind_tools(TOOLS)

    prompt = [{"role": "system", "content": cfg.system_prompt}, *state.messages]
    reply = cast(AIMessage, await chat_model.ainvoke(prompt))

    # On the final permitted step the model must answer instead of calling
    # more tools; replace a dangling tool request with an apology message.
    if state.is_last_step and reply.tool_calls:
        fallback = AIMessage(
            id=reply.id,
            content="Sorry, I could not find an answer to your question in the specified number of steps.",
        )
        return {"messages": [fallback]}

    # Append the model's reply to the existing messages.
    return {"messages": [reply]}
60
+
61
+
# Define a new graph
builder = StateGraph(AgentState, input=InputState, config_schema=Configuration)

# Define the two nodes we will cycle between
builder.add_node(call_model)  # node name defaults to the function name, "call_model"
builder.add_node("tools", ToolNode(TOOLS))

# Set the entrypoint as `call_model`
# This means that this node is the first one called
builder.add_edge("__start__", "call_model")
72
+
73
+
def route_model_output(state: SQLAgentState) -> Literal["__end__", "tools"]:
    """Determine the next node based on the model's output.

    Returns "tools" when the last AI message requests tool calls and the
    retry budget (3 attempts) is not exhausted; otherwise ends the run.

    Raises:
        ValueError: If the last message is not an AIMessage.
    """
    last_message = state.messages[-1]
    if not isinstance(last_message, AIMessage):
        raise ValueError(f"Expected AIMessage in output edges, but got {type(last_message).__name__}")

    # If there is no tool call, then we finish
    if not last_message.tool_calls:
        return "__end__"

    # If we've exceeded max attempts, end the conversation.
    # BUG FIX: the graph is compiled with AgentState, which has no
    # `query_attempts` field, so attribute access raised AttributeError on
    # every tool call. Use the state's dict-like .get() with a default of 0
    # instead; it works for both AgentState and SQLAgentState.
    if state.get("query_attempts", 0) >= 3:
        return "__end__"

    # Otherwise execute the requested actions
    return "tools"
90
+
91
+
# Add a conditional edge to determine the next step after `call_model`
builder.add_conditional_edges(
    "call_model",
    # After call_model finishes running, the next node(s) are scheduled
    # based on the output from route_model_output
    route_model_output,
)

# Add a normal edge from `tools` to `call_model`
# This creates a cycle: after using tools, we always return to the model
builder.add_edge("tools", "call_model")

# Compile the builder into an executable graph.
# BUG FIX: `memory` was created but never wired into the graph, so the
# `thread_id` used by the __main__ demo below had no effect. Pass it as
# the checkpointer so conversation state persists per thread.
memory = MemorySaver()
graph = builder.compile(checkpointer=memory, name="powersim_agent")
if __name__ == "__main__":
    import asyncio

    from langchain_core.messages import HumanMessage

    async def main():
        # Define the input using proper message format
        input_data = {
            "messages": [
                HumanMessage(content="What is the total revenue?"),
            ]
        }

        # Thread id keys checkpointed conversation state (requires a checkpointer).
        config = {
            "configurable": {
                "thread_id": "12345",
            }
        }

        # Stream the execution to see what's happening inside
        print("\n=== STARTING AGENT EXECUTION ===\n")

        # Use astream to see intermediate steps
        async for chunk in graph.astream(input_data, config, stream_mode="updates"):
            for node_name, node_output in chunk.items():
                print(f"\n--- OUTPUT FROM NODE: {node_name} ---")

                # Extract messages if they exist
                if "messages" in node_output and node_output["messages"]:
                    latest_message = node_output["messages"][-1]

                    # Print message content based on type
                    print(f"MESSAGE TYPE: {type(latest_message).__name__}")

                    if hasattr(latest_message, "content") and latest_message.content:
                        print(f"CONTENT: {latest_message.content[:500]}...")

                    # Print tool calls if present
                    if hasattr(latest_message, "tool_calls") and latest_message.tool_calls:
                        print(f"TOOL CALLS: {latest_message.tool_calls}")

                    # Handle tool messages specifically
                    if hasattr(latest_message, "name") and hasattr(latest_message, "tool_call_id"):
                        print(f"TOOL: {latest_message.name}")
                        print(f"TOOL CALL ID: {latest_message.tool_call_id}")
                        if hasattr(latest_message, "content"):
                            print(f"RESULT: {latest_message.content[:500]}...")

                print("-----------------------------------")

            print("\n==== CHUNK COMPLETE ====\n")

        # Get the final response
        # NOTE(review): this re-invokes the graph on the same thread — it is a
        # second full run, not a readback of the streamed result above.
        final_response = await graph.ainvoke(input_data, config)

        print("\n=== FINAL RESPONSE ===\n")
        print(final_response)

    # Run the async main function
    asyncio.run(main())
app/agent/langgraph.json ADDED
@@ -0,0 +1,7 @@
 
 
 
 
 
 
 
 
1
+ {
2
+ "dependencies": ["."],
3
+ "graphs": {
4
+ "agent": "./app/agent/graph.py:graph"
5
+ },
6
+ "env": ".env"
7
+ }
app/agent/prompts.py ADDED
@@ -0,0 +1,37 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Prompts used by the agent."""
2
+
3
+ SYSTEM_PROMPT = """
4
+ You are an AI agent designed to interact with a SQLite database.
5
+ Your task is to:
6
+ 1. Understand natural language questions
7
+ 2. Convert them into valid SQL queries
8
+ 3. Execute the queries and return meaningful results
9
+
10
+ Guidelines:
11
+ - Always start by examining the database schema using the get_schema tool
12
+ - Write SQL queries that are specific to the question
13
+ - Only query relevant columns
14
+ - Use appropriate JOINs and WHERE clauses
15
+ - Limit results to reasonable numbers (default 10)
16
+ - Handle errors gracefully
17
+ - Never make DML statements (INSERT, UPDATE, DELETE, DROP)
18
+
19
+ When generating SQL:
20
+ - Use proper SQLite syntax
21
+ - Include appropriate table aliases
22
+ - Use meaningful column names in the output
23
+ - Add comments for complex queries
24
+ - Validate query structure before execution
25
+
26
+ If a query fails:
27
+ 1. Analyze the error message
28
+ 2. Check the schema again
29
+ 3. Rewrite the query with corrections
30
+ 4. Try again with the modified query
31
+
32
+ Remember to:
33
+ - Double-check your queries before execution
34
+ - Format results in a clear, readable way
35
+ - Explain your reasoning when necessary
36
+ - Handle edge cases appropriately
37
+ """
app/agent/state.py ADDED
@@ -0,0 +1,69 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Define agent state
2
+ from dataclasses import dataclass, field
3
+ from typing import Annotated, Dict, List, Optional, Sequence
4
+
5
+ from copilotkit import CopilotKitState # noqa: F401
6
+ from langchain_core.messages import AnyMessage
7
+ from langgraph.graph import add_messages
8
+ from langgraph.managed import IsLastStep, RemainingSteps
9
+
10
+
def merge_lists(a: list, b: list) -> list:
    """Return the concatenation of ``a`` and ``b``.

    When both arguments are lists they are combined (neither is mutated);
    if either is not a list, ``b`` replaces the value outright.
    """
    if isinstance(a, list) and isinstance(b, list):
        return a + b
    return b
14
+
15
+
@dataclass
class InputState:
    """Defines the input state for the agent, representing a narrower interface to the outside world.

    This class is used to define the initial state and structure of incoming data.
    """

    # Defaults to an empty conversation; merged (not replaced) on updates.
    messages: Annotated[Sequence[AnyMessage], add_messages] = field(default_factory=list)
    """
    Messages tracking the primary execution state of the agent.

    Typically accumulates a pattern of:
    1. HumanMessage - user input
    2. AIMessage with .tool_calls - agent picking tool(s) to use to collect information
    3. ToolMessage(s) - the responses (or errors) from the executed tools
    4. AIMessage without .tool_calls - agent responding in unstructured format to the user
    5. HumanMessage - user responds with the next conversational turn

    Steps 2-5 may repeat as needed.

    The `add_messages` annotation ensures that new messages are merged with existing ones,
    updating by ID to maintain an "append-only" state unless a message with the same ID is provided.
    """
39
+
40
+
@dataclass
class AgentState(InputState):
    """Full agent state: conversation messages plus LangGraph-managed step counters."""

    # Managed by LangGraph: steps remaining before the recursion limit is hit.
    remaining_steps: RemainingSteps = 25
    # Managed by LangGraph: True when this is the final allowed step.
    is_last_step: IsLastStep = field(default=False)
    # Free-form progress text (emitted to the UI by tools such as run_query).
    progress: Optional[str] = None

    def items(self):
        """Make AgentState behave like a dictionary for CopilotKit compatibility.

        This method returns key-value pairs for all attributes in the dataclass.
        """
        return self.__dict__.items()

    def __getitem__(self, key):
        """Support dictionary-like access."""
        return getattr(self, key)

    def get(self, key, default=None):
        """Provide dictionary-like get method with default support."""
        return getattr(self, key, default)
61
+
62
+
@dataclass
class SQLAgentState(AgentState):
    """Extended state for SQL agent with query tracking."""

    # Most recent SQL text sent to the database, if any.
    last_query: Optional[str] = None
    # Number of query attempts so far; routing ends the run at 3 (see graph.py).
    query_attempts: int = 0
    # Cached schema mapping: table name -> list of column names.
    schema: Optional[Dict[str, List[str]]] = None
app/agent/tools.py ADDED
@@ -0,0 +1,80 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import json
2
+ import sqlite3
3
+ from pathlib import Path
4
+ from typing import Any, Callable, Dict, List
5
+
6
+ import pandas as pd
7
+ from copilotkit.langgraph import copilotkit_emit_state
8
+ from langchain_core.runnables.config import RunnableConfig
9
+ from langchain_core.tools import tool
10
+ from langchain_core.tools.base import InjectedToolCallId
11
+ from langgraph.prebuilt import InjectedState
12
+ from tenacity import retry, stop_after_attempt, wait_exponential
13
+ from typing_extensions import Annotated
14
+
15
+ # Database path
16
+ DB_PATH = Path(__file__).parent.parent.parent / "data" / "sqlite-sakila.db"
17
+
18
+
class SQLiteDatabase:
    """Thin wrapper around a SQLite database file.

    Opens a fresh connection per operation; provides retry-protected read
    queries (returned as pandas DataFrames) and schema introspection.
    """

    def __init__(self, db_path: Path):
        # Path to the .db file; never validated here — errors surface on use.
        self.db_path = db_path

    @retry(stop=stop_after_attempt(3), wait=wait_exponential(multiplier=1, min=4, max=10))
    def execute_query(self, query: str) -> pd.DataFrame:
        """Execute a SQL query with retry logic and return a DataFrame.

        NOTE(review): the tenacity retry also re-runs non-transient failures
        such as SQL syntax errors; consider retrying only on lock/busy errors.

        Raises:
            Exception: wrapping the underlying sqlite3 / pandas error.
        """
        try:
            with sqlite3.connect(self.db_path) as conn:
                return pd.read_sql_query(query, conn)
        except sqlite3.Error as e:
            # Chain the original exception so the root cause is preserved.
            raise Exception(f"Database error: {str(e)}") from e
        except Exception as e:
            raise Exception(f"Unexpected error: {str(e)}") from e

    def get_schema(self) -> Dict[str, List[str]]:
        """Return the database schema as {table_name: [column_name, ...]}."""
        schema = {}
        with sqlite3.connect(self.db_path) as conn:
            cursor = conn.cursor()
            cursor.execute("SELECT name FROM sqlite_master WHERE type='table';")
            tables = cursor.fetchall()

            for table in tables:
                table_name = table[0]
                # Quote the identifier so table names with unusual characters
                # cannot break (or inject into) the PRAGMA statement.
                cursor.execute(f'PRAGMA table_info("{table_name}");')
                columns = cursor.fetchall()
                # table_info row layout: (cid, name, type, notnull, dflt_value, pk)
                schema[table_name] = [col[1] for col in columns]
        return schema
48
+
49
+
50
+ # Initialize database
51
+ db = SQLiteDatabase(DB_PATH)
52
+
53
+
@tool(description="Get the database schema", return_direct=False)
async def get_schema(
    tool_call_id: Annotated[str, InjectedToolCallId],
    state: Annotated[Any, InjectedState],
) -> str:
    """Return the database schema as pretty-printed JSON.

    Both parameters are injected by LangGraph and are unused here; the
    schema is read from the module-level `db` instance.
    """
    schema = db.get_schema()
    return json.dumps(schema, indent=2)
62
+
63
+
@tool(description="Run a query on the database", return_direct=True)
async def run_query(
    tool_call_id: Annotated[str, InjectedToolCallId],
    state: Annotated[Any, InjectedState],
    config: RunnableConfig,
    query: str,
) -> str:
    """Run a SQL query on the database with retry logic.

    Emits a CopilotKit progress update, then executes `query` via the
    module-level `db`. Returns the rows as a JSON array of records, or an
    error string instead of raising — so the agent can read the failure
    and retry with a corrected query.
    """
    await copilotkit_emit_state(config, {"progress": "Running query..."})
    try:
        result = db.execute_query(query)
        return result.to_json(orient="records")
    except Exception as e:
        return f"Error executing query: {str(e)}"
78
+
79
+
# Tools exposed to the agent (bound to the model and to the graph's ToolNode).
TOOLS: List[Callable[..., Any]] = [get_schema, run_query]
app/agent/utils.py ADDED
@@ -0,0 +1,67 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import re
2
+ from typing import List
3
+
4
+ from IPython.display import Image, display
5
+ from langchain.chat_models import init_chat_model
6
+ from langchain_core.language_models import BaseChatModel
7
+
8
+
def parse_inf_file(inf_path: str) -> List[str]:
    """Parse a PSCAD .inf file and extract column descriptions.

    Scans every line for entries of the form
    ``PGB(n) Output Desc="<description>"`` and collects the quoted
    descriptions in file order.

    Args:
        inf_path (str): Path to the .inf file

    Returns:
        List[str]: Descriptions in order; whatever was collected so far
        (possibly empty) if reading fails.
    """
    pattern = re.compile(r'PGB\(\d+\)\s+Output\s+Desc="([^"]+)"')
    descriptions: List[str] = []

    try:
        with open(inf_path, "r") as handle:
            for raw_line in handle:
                hit = pattern.search(raw_line)
                if hit is not None:
                    descriptions.append(hit.group(1))
    except Exception as e:
        # Best-effort parser: report the problem and return what we have.
        print(f"Error parsing .inf file {inf_path}: {e}")

    return descriptions
32
+
33
+
def save_graph_diagram(graph, filename="graph.png"):
    """
    Generate and save a graph of the given graph.

    Renders the compiled LangGraph to a Mermaid PNG, writes it to
    `filename`, and displays it inline (requires an IPython environment).

    Args:
        graph: The graph to be depicted.
        filename: The name of the file to save the graph as.
    """
    try:
        # NOTE: `graph` is rebound to the PNG bytes here, shadowing the parameter.
        graph = graph.get_graph(xray=True).draw_mermaid_png()
        with open(filename, "wb") as f:
            f.write(graph)
        display(Image(filename))
    except Exception as e:
        # Best-effort: rendering needs optional deps (mermaid renderer, IPython).
        print(f"An error occurred while generating the graph: {e}")
49
+
50
+
def print_stream(stream):
    """Print the latest message of each chunk yielded by a graph stream.

    Raw (role, content) tuples are printed directly; message objects are
    rendered with their own pretty_print().
    """
    for chunk in stream:
        latest = chunk["messages"][-1]
        if not isinstance(latest, tuple):
            latest.pretty_print()
        else:
            print(latest)
58
+
59
+
def load_chat_model(fully_specified_name: str) -> BaseChatModel:
    """Load a chat model from a fully specified name.

    Args:
        fully_specified_name (str): String in the format 'provider/model'.

    Raises:
        ValueError: If the name contains no '/' separator (the 2-way
        unpack below fails).
    """
    # Split on the first '/' only, so model names that themselves contain
    # '/' are preserved intact.
    provider, model = fully_specified_name.split("/", maxsplit=1)
    return init_chat_model(model, model_provider=provider)
app/api/__init__.py ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ """
2
+ API endpoints package.
3
+ """
app/api/__pycache__/__init__.cpython-312.pyc ADDED
Binary file (162 Bytes). View file
 
app/api/__pycache__/insights.cpython-312.pyc ADDED
Binary file (15 kB). View file
 
app/api/insights.py ADDED
@@ -0,0 +1,268 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from fastapi import APIRouter, Depends, HTTPException
2
+ from sqlalchemy import desc, distinct, func
3
+ from sqlalchemy.orm import Session
4
+
5
+ from ..db.database import get_db
6
+ from ..db.models import (
7
+ Actor,
8
+ Address,
9
+ Category,
10
+ City,
11
+ Country,
12
+ Customer,
13
+ Film,
14
+ Inventory,
15
+ Payment,
16
+ Rental,
17
+ Store,
18
+ )
19
+
20
+ router = APIRouter()
21
+
22
+
@router.get("/insights")
async def get_insights(db: Session = Depends(get_db)):
    """Placeholder endpoint; currently always returns an empty insights list."""
    try:
        # TODO: Implement insights generation logic
        return {"status": "success", "insights": []}
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
30
+
31
+
@router.get("/insights/top-films")
async def get_top_films(limit: int = 10, db: Session = Depends(get_db)):
    """Return the top `limit` films ranked by rental count.

    Revenue is the sum of payments attached to each film's rentals.
    Responds 500 with the error detail on any failure.
    """
    try:
        # BUG FIX: the original joined Film.film_id directly to
        # Rental.inventory_id — two unrelated keys — pairing films with
        # arbitrary rentals. Film and Rental relate through Inventory.
        top_films = (
            db.query(
                Film.title,
                func.count(Rental.rental_id).label("rental_count"),
                Film.rental_rate,
                func.sum(Payment.amount).label("total_revenue"),
            )
            .join(Inventory, Film.film_id == Inventory.film_id)
            .join(Rental, Inventory.inventory_id == Rental.inventory_id)
            .join(Payment, Rental.rental_id == Payment.rental_id)
            .group_by(Film.film_id)
            .order_by(desc("rental_count"))
            .limit(limit)
            .all()
        )

        return {
            "status": "success",
            "data": [
                {
                    "title": film.title,
                    "rental_count": film.rental_count,
                    "rental_rate": float(film.rental_rate),
                    "total_revenue": float(film.total_revenue),
                }
                for film in top_films
            ],
        }
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
65
+
66
+
@router.get("/insights/category-performance")
async def get_category_performance(db: Session = Depends(get_db)):
    """Aggregate film count, average rental rate and total revenue per category.

    NOTE(review): the join conditions below look wrong for the Sakila
    schema — `Category.category_id == Film.film_id` and
    `Film.film_id == Rental.inventory_id` compare unrelated keys. The
    correct path is Category -> film_category -> Film -> Inventory ->
    Rental, but the film_category association model is not imported in
    this module. Verify the figures before relying on this endpoint.
    """
    try:
        # Get performance metrics by category
        category_stats = (
            db.query(
                Category.name,
                func.count(Film.film_id).label("film_count"),
                func.avg(Film.rental_rate).label("avg_rental_rate"),
                func.sum(Payment.amount).label("total_revenue"),
            )
            .join(Film, Category.category_id == Film.film_id)
            .join(Rental, Film.film_id == Rental.inventory_id)
            .join(Payment, Rental.rental_id == Payment.rental_id)
            .group_by(Category.category_id)
            .all()
        )

        return {
            "status": "success",
            "data": [
                {
                    "category": cat.name,
                    "film_count": cat.film_count,
                    "avg_rental_rate": float(cat.avg_rental_rate),
                    "total_revenue": float(cat.total_revenue),
                }
                for cat in category_stats
            ],
        }
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
99
+
100
+
@router.get("/insights/customer-activity")
async def get_customer_activity(limit: int = 10, db: Session = Depends(get_db)):
    """Return the `limit` biggest-spending customers with their rental counts."""
    try:
        # Rank customers by total payment amount across their rentals.
        spenders = (
            db.query(
                Customer.first_name,
                Customer.last_name,
                func.count(Rental.rental_id).label("rental_count"),
                func.sum(Payment.amount).label("total_spent"),
            )
            .join(Rental, Customer.customer_id == Rental.customer_id)
            .join(Payment, Rental.rental_id == Payment.rental_id)
            .group_by(Customer.customer_id)
            .order_by(desc("total_spent"))
            .limit(limit)
            .all()
        )

        payload = []
        for row in spenders:
            payload.append(
                {
                    "customer_name": f"{row.first_name} {row.last_name}",
                    "rental_count": row.rental_count,
                    "total_spent": float(row.total_spent),
                }
            )

        return {"status": "success", "data": payload}
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
133
+
134
+
@router.get("/insights/store-performance")
async def get_store_performance(db: Session = Depends(get_db)):
    """Per-store rental count, total revenue, and average payment amount.

    Responds 500 with the error detail on any failure.
    """
    try:
        # BUG FIX: the original joined Store.store_id to Rental.staff_id —
        # unrelated keys — matching stores to arbitrary staff rentals.
        # Stores relate to rentals through their inventory.
        store_stats = (
            db.query(
                Store.store_id,
                func.count(Rental.rental_id).label("rental_count"),
                func.sum(Payment.amount).label("total_revenue"),
                func.avg(Payment.amount).label("avg_transaction"),
            )
            .join(Inventory, Store.store_id == Inventory.store_id)
            .join(Rental, Inventory.inventory_id == Rental.inventory_id)
            .join(Payment, Rental.rental_id == Payment.rental_id)
            .group_by(Store.store_id)
            .all()
        )

        return {
            "status": "success",
            "data": [
                {
                    "store_id": store.store_id,
                    "rental_count": store.rental_count,
                    "total_revenue": float(store.total_revenue),
                    "avg_transaction": float(store.avg_transaction),
                }
                for store in store_stats
            ],
        }
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
166
+
167
+
168
+ @router.get("/insights/actor-popularity")
169
+ async def get_actor_popularity(limit: int = 10, db: Session = Depends(get_db)):
170
+ try:
171
+ # Get most popular actors based on film rentals
172
+ popular_actors = (
173
+ db.query(
174
+ Actor.first_name,
175
+ Actor.last_name,
176
+ func.count(Rental.rental_id).label("rental_count"),
177
+ func.sum(Payment.amount).label("total_revenue"),
178
+ )
179
+ .join(Film, Actor.actor_id == Film.film_id)
180
+ .join(Rental, Film.film_id == Rental.inventory_id)
181
+ .join(Payment, Rental.rental_id == Payment.rental_id)
182
+ .group_by(Actor.actor_id)
183
+ .order_by(desc("rental_count"))
184
+ .limit(limit)
185
+ .all()
186
+ )
187
+
188
+ return {
189
+ "status": "success",
190
+ "data": [
191
+ {
192
+ "actor_name": f"{actor.first_name} {actor.last_name}",
193
+ "rental_count": actor.rental_count,
194
+ "total_revenue": float(actor.total_revenue),
195
+ }
196
+ for actor in popular_actors
197
+ ],
198
+ }
199
+ except Exception as e:
200
+ raise HTTPException(status_code=500, detail=str(e))
201
+
202
+
203
+ @router.get("/insights/sales-overview")
204
+ async def get_sales_overview(db: Session = Depends(get_db)):
205
+ try:
206
+ # Get monthly sales data for the past year
207
+ sales_data = (
208
+ db.query(
209
+ func.strftime("%Y-%m", Payment.payment_date).label("date"),
210
+ func.sum(Payment.amount).label("Sales"),
211
+ func.sum(Payment.amount * 0.7).label("Profit"), # Assuming 70% profit margin
212
+ func.sum(Payment.amount * 0.3).label("Expenses"), # Assuming 30% expenses
213
+ func.count(distinct(Rental.customer_id)).label("Customers"),
214
+ )
215
+ .join(Rental, Payment.rental_id == Rental.rental_id)
216
+ .group_by(func.strftime("%Y-%m", Payment.payment_date))
217
+ .order_by(func.strftime("%Y-%m", Payment.payment_date))
218
+ .limit(12)
219
+ .all()
220
+ )
221
+
222
+ return {
223
+ "status": "success",
224
+ "data": [
225
+ {
226
+ "date": sale.date,
227
+ "Sales": float(sale.Sales),
228
+ "Profit": float(sale.Profit),
229
+ "Expenses": float(sale.Expenses),
230
+ "Customers": sale.Customers,
231
+ }
232
+ for sale in sales_data
233
+ ],
234
+ }
235
+ except Exception as e:
236
+ raise HTTPException(status_code=500, detail=str(e))
237
+
238
+
239
+ @router.get("/insights/regional-sales")
240
+ async def get_regional_sales(db: Session = Depends(get_db)):
241
+ try:
242
+ # Get sales data by country
243
+ regional_data = (
244
+ db.query(
245
+ Country.country.label("region"),
246
+ func.sum(Payment.amount).label("sales"),
247
+ func.count(distinct(Rental.customer_id)).label("marketShare"),
248
+ )
249
+ .join(City, Country.country_id == City.country_id)
250
+ .join(Address, City.city_id == Address.city_id)
251
+ .join(Store, Address.address_id == Store.address_id)
252
+ .join(Inventory, Store.store_id == Inventory.store_id)
253
+ .join(Rental, Inventory.inventory_id == Rental.inventory_id)
254
+ .join(Payment, Rental.rental_id == Payment.rental_id)
255
+ .group_by(Country.country)
256
+ .order_by(func.sum(Payment.amount).desc())
257
+ .all()
258
+ )
259
+
260
+ return {
261
+ "status": "success",
262
+ "data": [
263
+ {"region": region.region, "sales": float(region.sales), "marketShare": region.marketShare}
264
+ for region in regional_data
265
+ ],
266
+ }
267
+ except Exception as e:
268
+ raise HTTPException(status_code=500, detail=str(e))
app/api/query.py ADDED
@@ -0,0 +1,17 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from typing import Any, Dict
2
+
3
+ from fastapi import APIRouter, Depends, HTTPException
4
+ from sqlalchemy.orm import Session
5
+
6
+ from ..db.database import get_db
7
+
8
+ router = APIRouter()
9
+
10
+
11
+ @router.post("/query")
12
+ async def process_query(query: Dict[str, Any], db: Session = Depends(get_db)):
13
+ try:
14
+ # TODO: Implement query processing logic
15
+ return {"status": "success", "message": "Query received"}
16
+ except Exception as e:
17
+ raise HTTPException(status_code=500, detail=str(e))
app/db/__init__.py ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ """
2
+ Database package.
3
+ """
app/db/__pycache__/__init__.cpython-312.pyc ADDED
Binary file (156 Bytes). View file
 
app/db/__pycache__/database.cpython-312.pyc ADDED
Binary file (1.42 kB). View file
 
app/db/__pycache__/models.cpython-312.pyc ADDED
Binary file (10.3 kB). View file
 
app/db/database.py ADDED
@@ -0,0 +1,27 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import os

from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker

# Get the absolute path to the data directory
# (three dirname hops: db/database.py -> app/db -> app -> project root).
BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
DATABASE_PATH = os.path.join(BASE_DIR, "data", "sqlite-sakila.db")

# Ensure the data directory exists
os.makedirs(os.path.dirname(DATABASE_PATH), exist_ok=True)

SQLALCHEMY_DATABASE_URL = f"sqlite:///{DATABASE_PATH}"

# check_same_thread=False allows the SQLite connection to be used from
# threads other than the one that created it (needed for threaded servers).
engine = create_engine(SQLALCHEMY_DATABASE_URL, connect_args={"check_same_thread": False})
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)

# Declarative base class shared by all ORM models.
# NOTE(review): sqlalchemy.ext.declarative.declarative_base is deprecated
# since SQLAlchemy 1.4 in favour of sqlalchemy.orm.declarative_base —
# confirm the installed version before switching the import.
Base = declarative_base()
+
21
# Dependency
def get_db():
    """Yield a database session and guarantee it is closed afterwards.

    Intended for use as a FastAPI dependency (one session per request).
    """
    session = SessionLocal()
    try:
        yield session
    finally:
        session.close()
app/db/models.py ADDED
@@ -0,0 +1,211 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from sqlalchemy import (
2
+ Boolean,
3
+ Column,
4
+ DateTime,
5
+ ForeignKey,
6
+ Integer,
7
+ Numeric,
8
+ SmallInteger,
9
+ String,
10
+ Text,
11
+ )
12
+ from sqlalchemy.orm import relationship
13
+
14
+ from .database import Base
15
+
16
+
17
+ class Actor(Base):
18
+ __tablename__ = "actor"
19
+
20
+ actor_id = Column(Integer, primary_key=True)
21
+ first_name = Column(String(45), nullable=False)
22
+ last_name = Column(String(45), nullable=False)
23
+ last_update = Column(DateTime, nullable=False)
24
+ films = relationship("Film", secondary="film_actor", back_populates="actors")
25
+
26
+
27
+ class Category(Base):
28
+ __tablename__ = "category"
29
+
30
+ category_id = Column(SmallInteger, primary_key=True)
31
+ name = Column(String(25), nullable=False)
32
+ last_update = Column(DateTime, nullable=False)
33
+ films = relationship("Film", secondary="film_category", back_populates="categories")
34
+
35
+
36
+ class Film(Base):
37
+ __tablename__ = "film"
38
+
39
+ film_id = Column(Integer, primary_key=True)
40
+ title = Column(String(255), nullable=False)
41
+ description = Column(Text)
42
+ release_year = Column(String(4))
43
+ language_id = Column(SmallInteger, ForeignKey("language.language_id"), nullable=False)
44
+ original_language_id = Column(SmallInteger, ForeignKey("language.language_id"))
45
+ rental_duration = Column(SmallInteger, nullable=False, default=3)
46
+ rental_rate = Column(Numeric(4, 2), nullable=False, default=4.99)
47
+ length = Column(SmallInteger)
48
+ replacement_cost = Column(Numeric(5, 2), nullable=False, default=19.99)
49
+ rating = Column(String(10), default="G")
50
+ special_features = Column(String(100))
51
+ last_update = Column(DateTime, nullable=False)
52
+ language = relationship("Language", foreign_keys=[language_id])
53
+ original_language = relationship("Language", foreign_keys=[original_language_id])
54
+ inventory = relationship("Inventory", back_populates="film")
55
+ actors = relationship("Actor", secondary="film_actor", back_populates="films")
56
+ categories = relationship("Category", secondary="film_category", back_populates="films")
57
+
58
+
59
+ class Customer(Base):
60
+ __tablename__ = "customer"
61
+
62
+ customer_id = Column(Integer, primary_key=True)
63
+ store_id = Column(Integer, ForeignKey("store.store_id"), nullable=False)
64
+ first_name = Column(String(45), nullable=False)
65
+ last_name = Column(String(45), nullable=False)
66
+ email = Column(String(50))
67
+ address_id = Column(Integer, ForeignKey("address.address_id"), nullable=False)
68
+ active = Column(Boolean, nullable=False, default=True)
69
+ create_date = Column(DateTime, nullable=False)
70
+ last_update = Column(DateTime, nullable=False)
71
+ store = relationship("Store", back_populates="customers")
72
+ address = relationship("Address", back_populates="customers")
73
+ rentals = relationship("Rental", back_populates="customer")
74
+ payments = relationship("Payment", back_populates="customer")
75
+
76
+
77
+ class Store(Base):
78
+ __tablename__ = "store"
79
+
80
+ store_id = Column(Integer, primary_key=True)
81
+ manager_staff_id = Column(SmallInteger, ForeignKey("staff.staff_id"), nullable=False)
82
+ address_id = Column(Integer, ForeignKey("address.address_id"), nullable=False)
83
+ last_update = Column(DateTime, nullable=False)
84
+ address = relationship("Address", back_populates="stores")
85
+ manager = relationship("Staff", foreign_keys=[manager_staff_id])
86
+ inventory = relationship("Inventory", back_populates="store")
87
+ customers = relationship("Customer", back_populates="store")
88
+
89
+
90
+ class Rental(Base):
91
+ __tablename__ = "rental"
92
+
93
+ rental_id = Column(Integer, primary_key=True)
94
+ rental_date = Column(DateTime, nullable=False)
95
+ inventory_id = Column(Integer, ForeignKey("inventory.inventory_id"), nullable=False)
96
+ customer_id = Column(Integer, ForeignKey("customer.customer_id"), nullable=False)
97
+ return_date = Column(DateTime)
98
+ staff_id = Column(SmallInteger, ForeignKey("staff.staff_id"), nullable=False)
99
+ last_update = Column(DateTime, nullable=False)
100
+ inventory = relationship("Inventory", back_populates="rentals")
101
+ customer = relationship("Customer", back_populates="rentals")
102
+ staff = relationship("Staff", back_populates="rentals")
103
+ payments = relationship("Payment", back_populates="rental")
104
+
105
+
106
+ class Payment(Base):
107
+ __tablename__ = "payment"
108
+
109
+ payment_id = Column(Integer, primary_key=True)
110
+ customer_id = Column(Integer, ForeignKey("customer.customer_id"), nullable=False)
111
+ staff_id = Column(SmallInteger, ForeignKey("staff.staff_id"), nullable=False)
112
+ rental_id = Column(Integer, ForeignKey("rental.rental_id"))
113
+ amount = Column(Numeric(5, 2), nullable=False)
114
+ payment_date = Column(DateTime, nullable=False)
115
+ last_update = Column(DateTime, nullable=False)
116
+ customer = relationship("Customer", back_populates="payments")
117
+ staff = relationship("Staff", back_populates="payments")
118
+ rental = relationship("Rental", back_populates="payments")
119
+
120
+
121
+ class Country(Base):
122
+ __tablename__ = "country"
123
+
124
+ country_id = Column(SmallInteger, primary_key=True)
125
+ country = Column(String(50), nullable=False)
126
+ last_update = Column(DateTime)
127
+ cities = relationship("City", back_populates="country")
128
+
129
+
130
+ class City(Base):
131
+ __tablename__ = "city"
132
+
133
+ city_id = Column(Integer, primary_key=True)
134
+ city = Column(String(50), nullable=False)
135
+ country_id = Column(SmallInteger, ForeignKey("country.country_id"), nullable=False)
136
+ last_update = Column(DateTime, nullable=False)
137
+ country = relationship("Country", back_populates="cities")
138
+ addresses = relationship("Address", back_populates="city")
139
+
140
+
141
+ class Address(Base):
142
+ __tablename__ = "address"
143
+
144
+ address_id = Column(Integer, primary_key=True)
145
+ address = Column(String(50), nullable=False)
146
+ address2 = Column(String(50))
147
+ district = Column(String(20), nullable=False)
148
+ city_id = Column(Integer, ForeignKey("city.city_id"), nullable=False)
149
+ postal_code = Column(String(10))
150
+ phone = Column(String(20), nullable=False)
151
+ last_update = Column(DateTime, nullable=False)
152
+ city = relationship("City", back_populates="addresses")
153
+ stores = relationship("Store", back_populates="address")
154
+ customers = relationship("Customer", back_populates="address")
155
+ staff = relationship("Staff", back_populates="address")
156
+
157
+
158
+ class Staff(Base):
159
+ __tablename__ = "staff"
160
+
161
+ staff_id = Column(SmallInteger, primary_key=True)
162
+ first_name = Column(String(45), nullable=False)
163
+ last_name = Column(String(45), nullable=False)
164
+ address_id = Column(Integer, ForeignKey("address.address_id"), nullable=False)
165
+ picture = Column(Text)
166
+ email = Column(String(50))
167
+ store_id = Column(Integer, ForeignKey("store.store_id"), nullable=False)
168
+ active = Column(SmallInteger, nullable=False, default=1)
169
+ username = Column(String(16), nullable=False)
170
+ password = Column(String(40))
171
+ last_update = Column(DateTime, nullable=False)
172
+ address = relationship("Address", back_populates="staff")
173
+ store = relationship("Store", foreign_keys=[store_id])
174
+ rentals = relationship("Rental", back_populates="staff")
175
+ payments = relationship("Payment", back_populates="staff")
176
+
177
+
178
+ class Language(Base):
179
+ __tablename__ = "language"
180
+
181
+ language_id = Column(SmallInteger, primary_key=True)
182
+ name = Column(String(20), nullable=False)
183
+ last_update = Column(DateTime, nullable=False)
184
+
185
+
186
+ class Inventory(Base):
187
+ __tablename__ = "inventory"
188
+
189
+ inventory_id = Column(Integer, primary_key=True)
190
+ film_id = Column(Integer, ForeignKey("film.film_id"), nullable=False)
191
+ store_id = Column(Integer, ForeignKey("store.store_id"), nullable=False)
192
+ last_update = Column(DateTime, nullable=False)
193
+ film = relationship("Film", back_populates="inventory")
194
+ store = relationship("Store", back_populates="inventory")
195
+ rentals = relationship("Rental", back_populates="inventory")
196
+
197
+
198
+ class FilmActor(Base):
199
+ __tablename__ = "film_actor"
200
+
201
+ actor_id = Column(Integer, ForeignKey("actor.actor_id"), primary_key=True)
202
+ film_id = Column(Integer, ForeignKey("film.film_id"), primary_key=True)
203
+ last_update = Column(DateTime, nullable=False)
204
+
205
+
206
+ class FilmCategory(Base):
207
+ __tablename__ = "film_category"
208
+
209
+ film_id = Column(Integer, ForeignKey("film.film_id"), primary_key=True)
210
+ category_id = Column(SmallInteger, ForeignKey("category.category_id"), primary_key=True)
211
+ last_update = Column(DateTime, nullable=False)
app/main.py ADDED
@@ -0,0 +1,64 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import uvicorn
from copilotkit import CopilotKitRemoteEndpoint, LangGraphAgent
from copilotkit.integrations.fastapi import add_fastapi_endpoint
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware

from .agent.graph import graph
from .api import insights
from .db.database import Base, engine

# Create database tables (no-op for tables that already exist in the SQLite file).
Base.metadata.create_all(bind=engine)

app = FastAPI(
    title="InsightCopilot API", description="API for extracting insights from the Sakila database", version="1.0.0"
)

# Configure CORS
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],  # In production, replace with specific origins
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Initialize CopilotKit SDK with the single LangGraph agent defined in app.agent.graph.
sdk = CopilotKitRemoteEndpoint(
    agents=[
        LangGraphAgent(
            name="insight_copilot_agent",
            description="A copilot agent that can extract insights from the Sakila database",
            graph=graph,
        )
    ],
)

# Add CopilotKit endpoint
# NOTE(review): use_thread_pool=False presumably keeps agent execution on the
# event loop — confirm against the copilotkit FastAPI integration docs.
add_fastapi_endpoint(app, sdk, "/copilotkit", use_thread_pool=False)

# Include routers
app.include_router(insights.router, prefix="/api/v1", tags=["insights"])
43
+
44
+
45
+ @app.get("/")
46
+ async def root():
47
+ return {
48
+ "message": "Welcome to InsightCopilot API",
49
+ "version": "1.0.0",
50
+ "docs_url": "/docs",
51
+ "endpoints": {
52
+ "insights": {
53
+ "top_films": "/api/v1/insights/top-films",
54
+ "category_performance": "/api/v1/insights/category-performance",
55
+ "customer_activity": "/api/v1/insights/customer-activity",
56
+ "store_performance": "/api/v1/insights/store-performance",
57
+ "actor_popularity": "/api/v1/insights/actor-popularity",
58
+ }
59
+ },
60
+ }
61
+
62
+
63
# Allow running the API directly (e.g. `python -m app.main`) for local development.
if __name__ == "__main__":
    uvicorn.run(app, host="0.0.0.0", port=8000)
app/utils/helpers.py ADDED
@@ -0,0 +1,23 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from typing import Any, Dict, List
2
+
3
+ import pandas as pd
4
+
5
+
6
def process_data(data: List[Dict[str, Any]]) -> pd.DataFrame:
    """Build a pandas DataFrame from a list of record dictionaries."""
    frame = pd.DataFrame(data)
    return frame
9
+
10
+
11
def generate_sql_query(natural_language_query: str) -> str:
    """Translate a natural-language question into SQL.

    Placeholder implementation: the input is ignored and a fixed sample
    query is returned.
    """
    # TODO: Implement query generation logic
    placeholder = "SELECT * FROM data LIMIT 10"
    return placeholder
15
+
16
+
17
def format_response(data: pd.DataFrame) -> Dict[str, Any]:
    """Shape a DataFrame into the API's JSON response structure."""
    records = data.to_dict(orient="records")
    summary = {"row_count": len(data), "column_count": len(data.columns)}
    return {"columns": data.columns.tolist(), "data": records, "summary": summary}
data/README.txt ADDED
@@ -0,0 +1,47 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ### Database Description
2
+ The Sakila sample database is a fictitious database designed to represent a DVD rental store. The tables of the database include film, film_category, actor, customer, rental, payment and inventory among others.
3
+ The Sakila sample database is intended to provide a standard schema that can be used for examples in books, tutorials, articles, samples, and so forth.
4
+ Detailed information about the database can be found on the MySQL website: https://dev.mysql.com/doc/sakila/en/
5
+
6
+ Sakila for SQLite is a part of the sakila-sample-database-ports project intended to provide ported versions of the original MySQL database for other database systems, including:
7
+
8
+ Oracle
9
+ SQL Server
10
+ SQLite
11
+ Interbase/Firebird
12
+ Microsoft Access
13
+
14
+ Sakila for SQLite is a port of the Sakila example database available for MySQL, which was originally developed by Mike Hillyer of the MySQL AB documentation team.
15
+ This project is designed to help database administrators decide which database to use for the development of new products.
16
+ Users can run the same SQL against different kinds of databases and compare their performance.
17
+
18
+ License: BSD
19
+ Copyright DB Software Laboratory
20
+ http://www.etl-tools.com
21
+
22
+ Note:
23
+
24
+ Part of the insert scripts were generated by Advanced ETL Processor
25
+ http://www.etl-tools.com/etl-tools/advanced-etl-processor-enterprise/overview.html
26
+
27
+ Information about the project and the downloadable files can be found at:
28
+ https://code.google.com/archive/p/sakila-sample-database-ports/
29
+
30
+ Other versions and developments of the project can be found at:
31
+ https://github.com/ivanceras/sakila/tree/master/sqlite-sakila-db
32
+
33
+ https://github.com/jOOQ/jOOQ/tree/main/jOOQ-examples/Sakila
34
+
35
+ Direct access to the MySQL Sakila database, which does not require installation of MySQL (queries can be typed directly in the browser), is provided on the phpMyAdmin demo version website:
36
+ https://demo.phpmyadmin.net/master-config/
37
+
38
+ ### Files Description
39
+ The files in the sqlite-sakila-db folder are the script files which can be used to generate the SQLite version of the database.
40
+ For convenience, the script files have already been run in cmd to generate the sqlite-sakila.db file, as follows:
41
+
42
+ `sqlite> .open sqlite-sakila.db` # creates the .db file
43
+ `sqlite> .read sqlite-sakila-schema.sql` # creates the database schema
44
+ `sqlite> .read sqlite-sakila-insert-data.sql` # inserts the data
45
+
46
+ Therefore, the sqlite-sakila.db file can be directly loaded into SQLite3 and queries can be directly executed.
47
+ Note: Data about the film_text table is not provided in the script files, thus the film_text table is empty. Instead the film_id, title and description fields are included in the film table.
data/sqlite-sakila-db/ReadME.txt ADDED
@@ -0,0 +1,16 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ Sakila for SQLite is a port of the Sakila example database available for MySQL, which was originally developed by Mike Hillyer of the MySQL AB documentation team.
2
+ This project is designed to help database administrators to decide which database to use for development of new products
3
+ The user can run the same SQL against different kind of databases and compare the performance
4
+
5
+ License: BSD
6
+ Copyright DB Software Laboratory
7
+ http://www.etl-tools.com
8
+
9
+ Note:
10
+
11
+ Part of the insert scripts were generated by Advanced ETL Processor
12
+ http://www.etl-tools.com/etl-tools/advanced-etl-processor-enterprise/overview.html
13
+
14
+ To run SQL sripts use Database browser
15
+ Create new connection and set delimiter to ;
16
+ http://www.etl-tools.com/database-editors/database-browser/overview.html
data/sqlite-sakila-db/sqlite-sakila-delete-data.sql ADDED
@@ -0,0 +1,45 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
/*

Sakila for SQLite is a port of the Sakila example database available for MySQL, which was originally developed by Mike Hillyer of the MySQL AB documentation team.
This project is designed to help database administrators to decide which database to use for development of new products
The user can run the same SQL against different kind of databases and compare the performance

License: BSD
Copyright DB Software Laboratory
http://www.etl-tools.com

*/

-- Delete data
-- Tables are emptied child-first (payment/rental before customer, film, etc.)
-- so that foreign-key constraints are never violated mid-script.
DELETE FROM payment
;
DELETE FROM rental
;
DELETE FROM customer
;
DELETE FROM film_category
;
DELETE FROM film_text
;
DELETE FROM film_actor
;
DELETE FROM inventory
;
DELETE FROM film
;
DELETE FROM category
;
DELETE FROM staff
;
DELETE FROM store
;
DELETE FROM actor
;
DELETE FROM address
;
DELETE FROM city
;
DELETE FROM country
;
DELETE FROM language
;
data/sqlite-sakila-db/sqlite-sakila-drop-objects.sql ADDED
@@ -0,0 +1,68 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
/*

Sakila for SQLite is a port of the Sakila example database available for MySQL, which was originally developed by Mike Hillyer of the MySQL AB documentation team.
This project is designed to help database administrators to decide which database to use for development of new products
The user can run the same SQL against different kind of databases and compare the performance

License: BSD
Copyright DB Software Laboratory
http://www.etl-tools.com

*/

-- Drop Views
-- Views are dropped first, before the tables they select from.

DROP VIEW customer_list
;
DROP VIEW film_list
;
--DROP VIEW nicer_but_slower_film_list;
DROP VIEW sales_by_film_category
;
DROP VIEW sales_by_store
;
DROP VIEW staff_list
;

-- Drop Tables
-- Dropped child-first so foreign-key references are removed before the
-- tables they point at.

DROP TABLE payment
;
DROP TABLE rental
;
DROP TABLE inventory
;
DROP TABLE film_text
;
DROP TABLE film_category
;
DROP TABLE film_actor
;
DROP TABLE film
;
DROP TABLE language
;
DROP TABLE customer
;
DROP TABLE actor
;
DROP TABLE category
;
DROP TABLE store
;
DROP TABLE address
;
DROP TABLE staff
;
DROP TABLE city
;
DROP TABLE country
;

-- Procedures and views
-- (kept commented out: stored procedures/functions from the MySQL original
-- have no SQLite equivalent)
--drop procedure film_in_stock;
--drop procedure film_not_in_stock;
--drop function get_customer_balance;
--drop function inventory_held_by_customer;
--drop function inventory_in_stock;
--drop procedure rewards_report;
data/sqlite-sakila-db/sqlite-sakila-insert-data.sql ADDED
The diff for this file is too large to render. See raw diff
 
data/sqlite-sakila-db/sqlite-sakila-schema.sql ADDED
@@ -0,0 +1,644 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ /*
2
+
3
+ Sakila for SQLite is a port of the Sakila example database available for MySQL, which was originally developed by Mike Hillyer of the MySQL AB documentation team.
4
+ This project is designed to help database administrators to decide which database to use for development of new products
5
+ The user can run the same SQL against different kind of databases and compare the performance
6
+
7
+ License: BSD
8
+ Copyright DB Software Laboratory
9
+ http://www.etl-tools.com
10
+
11
+ */
12
+
13
+ --
14
+ -- Table structure for table actor
15
+ --
16
+ --DROP TABLE actor;
17
+
18
+ CREATE TABLE actor (
19
+ actor_id numeric NOT NULL ,
20
+ first_name VARCHAR(45) NOT NULL,
21
+ last_name VARCHAR(45) NOT NULL,
22
+ last_update TIMESTAMP NOT NULL,
23
+ PRIMARY KEY (actor_id)
24
+ )
25
+ ;
26
+
27
+ CREATE INDEX idx_actor_last_name ON actor(last_name)
28
+ ;
29
+
30
+ CREATE TRIGGER actor_trigger_ai AFTER INSERT ON actor
31
+ BEGIN
32
+ UPDATE actor SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
33
+ END
34
+ ;
35
+
36
+ CREATE TRIGGER actor_trigger_au AFTER UPDATE ON actor
37
+ BEGIN
38
+ UPDATE actor SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
39
+ END
40
+ ;
41
+
42
+ --
43
+ -- Table structure for table country
44
+ --
45
+
46
+ CREATE TABLE country (
47
+ country_id SMALLINT NOT NULL,
48
+ country VARCHAR(50) NOT NULL,
49
+ last_update TIMESTAMP,
50
+ PRIMARY KEY (country_id)
51
+ )
52
+ ;
53
+
54
+ CREATE TRIGGER country_trigger_ai AFTER INSERT ON country
55
+ BEGIN
56
+ UPDATE country SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
57
+ END
58
+ ;
59
+
60
+ CREATE TRIGGER country_trigger_au AFTER UPDATE ON country
61
+ BEGIN
62
+ UPDATE country SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
63
+ END
64
+ ;
65
+
66
+ --
67
+ -- Table structure for table city
68
+ --
69
+
70
+ CREATE TABLE city (
71
+ city_id int NOT NULL,
72
+ city VARCHAR(50) NOT NULL,
73
+ country_id SMALLINT NOT NULL,
74
+ last_update TIMESTAMP NOT NULL,
75
+ PRIMARY KEY (city_id),
76
+ CONSTRAINT fk_city_country FOREIGN KEY (country_id) REFERENCES country (country_id) ON DELETE NO ACTION ON UPDATE CASCADE
77
+ )
78
+ ;
79
+ CREATE INDEX idx_fk_country_id ON city(country_id)
80
+ ;
81
+
82
+ CREATE TRIGGER city_trigger_ai AFTER INSERT ON city
83
+ BEGIN
84
+ UPDATE city SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
85
+ END
86
+ ;
87
+
88
+ CREATE TRIGGER city_trigger_au AFTER UPDATE ON city
89
+ BEGIN
90
+ UPDATE city SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
91
+ END
92
+ ;
93
+
94
+ --
95
+ -- Table structure for table address
96
+ --
97
+
98
+ CREATE TABLE address (
99
+ address_id int NOT NULL,
100
+ address VARCHAR(50) NOT NULL,
101
+ address2 VARCHAR(50) DEFAULT NULL,
102
+ district VARCHAR(20) NOT NULL,
103
+ city_id INT NOT NULL,
104
+ postal_code VARCHAR(10) DEFAULT NULL,
105
+ phone VARCHAR(20) NOT NULL,
106
+ last_update TIMESTAMP NOT NULL,
107
+ PRIMARY KEY (address_id),
108
+ CONSTRAINT fk_address_city FOREIGN KEY (city_id) REFERENCES city (city_id) ON DELETE NO ACTION ON UPDATE CASCADE
109
+ )
110
+ ;
111
+
112
+ CREATE INDEX idx_fk_city_id ON address(city_id)
113
+ ;
114
+
115
+ CREATE TRIGGER address_trigger_ai AFTER INSERT ON address
116
+ BEGIN
117
+ UPDATE address SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
118
+ END
119
+ ;
120
+
121
+ CREATE TRIGGER address_trigger_au AFTER UPDATE ON address
122
+ BEGIN
123
+ UPDATE address SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
124
+ END
125
+ ;
126
+
127
+ --
128
+ -- Table structure for table language
129
+ --
130
+
131
+ CREATE TABLE language (
132
+ language_id SMALLINT NOT NULL ,
133
+ name CHAR(20) NOT NULL,
134
+ last_update TIMESTAMP NOT NULL,
135
+ PRIMARY KEY (language_id)
136
+ )
137
+ ;
138
+
139
+ CREATE TRIGGER language_trigger_ai AFTER INSERT ON language
140
+ BEGIN
141
+ UPDATE language SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
142
+ END
143
+ ;
144
+
145
+ CREATE TRIGGER language_trigger_au AFTER UPDATE ON language
146
+ BEGIN
147
+ UPDATE language SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
148
+ END
149
+ ;
150
+
151
+ --
152
+ -- Table structure for table category
153
+ --
154
+
155
+ CREATE TABLE category (
156
+ category_id SMALLINT NOT NULL,
157
+ name VARCHAR(25) NOT NULL,
158
+ last_update TIMESTAMP NOT NULL,
159
+ PRIMARY KEY (category_id)
160
+ );
161
+
162
+ CREATE TRIGGER category_trigger_ai AFTER INSERT ON category
163
+ BEGIN
164
+ UPDATE category SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
165
+ END
166
+ ;
167
+
168
+ CREATE TRIGGER category_trigger_au AFTER UPDATE ON category
169
+ BEGIN
170
+ UPDATE category SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
171
+ END
172
+ ;
173
+
174
--
-- Table structure for table customer
--

CREATE TABLE customer (
  customer_id INT         NOT NULL,
  store_id    INT         NOT NULL,
  first_name  VARCHAR(45) NOT NULL,
  last_name   VARCHAR(45) NOT NULL,
  email       VARCHAR(50) DEFAULT NULL,
  address_id  INT         NOT NULL,
  active      CHAR(1)     DEFAULT 'Y' NOT NULL,
  create_date TIMESTAMP   NOT NULL,
  last_update TIMESTAMP   NOT NULL,
  PRIMARY KEY (customer_id),
  CONSTRAINT fk_customer_store   FOREIGN KEY (store_id)   REFERENCES store (store_id)     ON DELETE NO ACTION ON UPDATE CASCADE,
  CONSTRAINT fk_customer_address FOREIGN KEY (address_id) REFERENCES address (address_id) ON DELETE NO ACTION ON UPDATE CASCADE
);

-- Foreign-key lookup indexes plus a last-name search index.
CREATE INDEX idx_customer_fk_store_id   ON customer(store_id);
CREATE INDEX idx_customer_fk_address_id ON customer(address_id);
CREATE INDEX idx_customer_last_name     ON customer(last_name);

-- Keep customer.last_update current on insert and update.
CREATE TRIGGER customer_trigger_ai AFTER INSERT ON customer
BEGIN
  UPDATE customer SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
END;

CREATE TRIGGER customer_trigger_au AFTER UPDATE ON customer
BEGIN
  UPDATE customer SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
END;
212
+
213
--
-- Table structure for table film
--

CREATE TABLE film (
  film_id              int                NOT NULL,
  title                VARCHAR(255)       NOT NULL,
  -- NOTE(review): "BLOB SUB_TYPE TEXT" is a Firebird-style type name the port
  -- kept; SQLite accepts arbitrary declared types — confirm the intended affinity.
  description          BLOB SUB_TYPE TEXT DEFAULT NULL,
  release_year         VARCHAR(4)         DEFAULT NULL,
  language_id          SMALLINT           NOT NULL,
  original_language_id SMALLINT           DEFAULT NULL,
  rental_duration      SMALLINT           DEFAULT 3 NOT NULL,
  rental_rate          DECIMAL(4,2)       DEFAULT 4.99 NOT NULL,
  length               SMALLINT           DEFAULT NULL,
  replacement_cost     DECIMAL(5,2)       DEFAULT 19.99 NOT NULL,
  rating               VARCHAR(10)        DEFAULT 'G',
  special_features     VARCHAR(100)       DEFAULT NULL,
  last_update          TIMESTAMP          NOT NULL,
  PRIMARY KEY (film_id),
  -- special_features is free text constrained to a known vocabulary (or NULL).
  CONSTRAINT CHECK_special_features CHECK(special_features is null or
                                          special_features like '%Trailers%' or
                                          special_features like '%Commentaries%' or
                                          special_features like '%Deleted Scenes%' or
                                          special_features like '%Behind the Scenes%'),
  CONSTRAINT CHECK_special_rating CHECK(rating in ('G','PG','PG-13','R','NC-17')),
  CONSTRAINT fk_film_language          FOREIGN KEY (language_id)          REFERENCES language (language_id),
  CONSTRAINT fk_film_language_original FOREIGN KEY (original_language_id) REFERENCES language (language_id)
);

CREATE INDEX idx_fk_language_id          ON film(language_id);
CREATE INDEX idx_fk_original_language_id ON film(original_language_id);

-- Keep film.last_update current on insert and update.
CREATE TRIGGER film_trigger_ai AFTER INSERT ON film
BEGIN
  UPDATE film SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
END;

CREATE TRIGGER film_trigger_au AFTER UPDATE ON film
BEGIN
  UPDATE film SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
END;
258
+
259
--
-- Table structure for table film_actor
--
-- Junction table linking actors to the films they appear in.

CREATE TABLE film_actor (
  actor_id    INT       NOT NULL,
  film_id     INT       NOT NULL,
  last_update TIMESTAMP NOT NULL,
  PRIMARY KEY (actor_id, film_id),
  CONSTRAINT fk_film_actor_actor FOREIGN KEY (actor_id) REFERENCES actor (actor_id) ON DELETE NO ACTION ON UPDATE CASCADE,
  CONSTRAINT fk_film_actor_film  FOREIGN KEY (film_id)  REFERENCES film (film_id)   ON DELETE NO ACTION ON UPDATE CASCADE
);

CREATE INDEX idx_fk_film_actor_film  ON film_actor(film_id);
CREATE INDEX idx_fk_film_actor_actor ON film_actor(actor_id);

-- Keep film_actor.last_update current on insert and update.
CREATE TRIGGER film_actor_trigger_ai AFTER INSERT ON film_actor
BEGIN
  UPDATE film_actor SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
END;

CREATE TRIGGER film_actor_trigger_au AFTER UPDATE ON film_actor
BEGIN
  UPDATE film_actor SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
END;
290
+
291
+
292
--
-- Table structure for table film_category
--
-- Junction table linking films to their categories.

CREATE TABLE film_category (
  film_id     INT       NOT NULL,
  category_id SMALLINT  NOT NULL,
  last_update TIMESTAMP NOT NULL,
  PRIMARY KEY (film_id, category_id),
  CONSTRAINT fk_film_category_film     FOREIGN KEY (film_id)     REFERENCES film (film_id)         ON DELETE NO ACTION ON UPDATE CASCADE,
  CONSTRAINT fk_film_category_category FOREIGN KEY (category_id) REFERENCES category (category_id) ON DELETE NO ACTION ON UPDATE CASCADE
);

CREATE INDEX idx_fk_film_category_film     ON film_category(film_id);
CREATE INDEX idx_fk_film_category_category ON film_category(category_id);

-- Keep film_category.last_update current on insert and update.
CREATE TRIGGER film_category_trigger_ai AFTER INSERT ON film_category
BEGIN
  UPDATE film_category SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
END;

CREATE TRIGGER film_category_trigger_au AFTER UPDATE ON film_category
BEGIN
  UPDATE film_category SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
END;
323
+
324
--
-- Table structure for table film_text
--
-- Denormalized copy of film title/description (no triggers defined here).

CREATE TABLE film_text (
  film_id     SMALLINT     NOT NULL,
  title       VARCHAR(255) NOT NULL,
  -- NOTE(review): Firebird-style type name carried over by the port;
  -- SQLite accepts arbitrary declared types — confirm intended affinity.
  description BLOB SUB_TYPE TEXT,
  PRIMARY KEY (film_id)
);
335
+
336
--
-- Table structure for table inventory
--

CREATE TABLE inventory (
  inventory_id INT       NOT NULL,
  film_id      INT       NOT NULL,
  store_id     INT       NOT NULL,
  last_update  TIMESTAMP NOT NULL,
  PRIMARY KEY (inventory_id),
  CONSTRAINT fk_inventory_store FOREIGN KEY (store_id) REFERENCES store (store_id) ON DELETE NO ACTION ON UPDATE CASCADE,
  CONSTRAINT fk_inventory_film  FOREIGN KEY (film_id)  REFERENCES film (film_id)   ON DELETE NO ACTION ON UPDATE CASCADE
);

CREATE INDEX idx_fk_film_id ON inventory(film_id);

-- NOTE(review): index name suggests (film_id, store_id) but the key order is
-- (store_id, film_id); kept as-is to preserve the upstream schema.
CREATE INDEX idx_fk_film_id_store_id ON inventory(store_id, film_id);

-- Keep inventory.last_update current on insert and update.
CREATE TRIGGER inventory_trigger_ai AFTER INSERT ON inventory
BEGIN
  UPDATE inventory SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
END;

CREATE TRIGGER inventory_trigger_au AFTER UPDATE ON inventory
BEGIN
  UPDATE inventory SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
END;
368
+
369
--
-- Table structure for table staff
--

CREATE TABLE staff (
  staff_id    SMALLINT    NOT NULL,
  first_name  VARCHAR(45) NOT NULL,
  last_name   VARCHAR(45) NOT NULL,
  address_id  INT         NOT NULL,
  picture     BLOB        DEFAULT NULL,
  email       VARCHAR(50) DEFAULT NULL,
  store_id    INT         NOT NULL,
  active      SMALLINT    DEFAULT 1 NOT NULL,
  username    VARCHAR(16) NOT NULL,
  password    VARCHAR(40) DEFAULT NULL,
  last_update TIMESTAMP   NOT NULL,
  PRIMARY KEY (staff_id),
  CONSTRAINT fk_staff_store   FOREIGN KEY (store_id)   REFERENCES store (store_id)     ON DELETE NO ACTION ON UPDATE CASCADE,
  CONSTRAINT fk_staff_address FOREIGN KEY (address_id) REFERENCES address (address_id) ON DELETE NO ACTION ON UPDATE CASCADE
);

CREATE INDEX idx_fk_staff_store_id   ON staff(store_id);
CREATE INDEX idx_fk_staff_address_id ON staff(address_id);

-- Keep staff.last_update current on insert and update.
CREATE TRIGGER staff_trigger_ai AFTER INSERT ON staff
BEGIN
  UPDATE staff SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
END;

CREATE TRIGGER staff_trigger_au AFTER UPDATE ON staff
BEGIN
  UPDATE staff SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
END;
407
+
408
--
-- Table structure for table store
--

CREATE TABLE store (
  store_id         INT       NOT NULL,
  manager_staff_id SMALLINT  NOT NULL,
  address_id       INT       NOT NULL,
  last_update      TIMESTAMP NOT NULL,
  PRIMARY KEY (store_id),
  CONSTRAINT fk_store_staff   FOREIGN KEY (manager_staff_id) REFERENCES staff (staff_id),
  CONSTRAINT fk_store_address FOREIGN KEY (address_id)       REFERENCES address (address_id)
);

CREATE INDEX idx_store_fk_manager_staff_id ON store(manager_staff_id);
CREATE INDEX idx_fk_store_address          ON store(address_id);

-- Keep store.last_update current on insert and update.
CREATE TRIGGER store_trigger_ai AFTER INSERT ON store
BEGIN
  UPDATE store SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
END;

CREATE TRIGGER store_trigger_au AFTER UPDATE ON store
BEGIN
  UPDATE store SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
END;
440
+
441
--
-- Table structure for table payment
--

CREATE TABLE payment (
  payment_id   int          NOT NULL,
  customer_id  INT          NOT NULL,
  staff_id     SMALLINT     NOT NULL,
  rental_id    INT          DEFAULT NULL,
  amount       DECIMAL(5,2) NOT NULL,
  payment_date TIMESTAMP    NOT NULL,
  last_update  TIMESTAMP    NOT NULL,
  PRIMARY KEY (payment_id),
  -- Forward reference: rental is created later in this script; SQLite
  -- resolves foreign-key targets at enforcement time, not at CREATE time.
  CONSTRAINT fk_payment_rental   FOREIGN KEY (rental_id)   REFERENCES rental (rental_id) ON DELETE SET NULL ON UPDATE CASCADE,
  CONSTRAINT fk_payment_customer FOREIGN KEY (customer_id) REFERENCES customer (customer_id),
  CONSTRAINT fk_payment_staff    FOREIGN KEY (staff_id)    REFERENCES staff (staff_id)
);

CREATE INDEX idx_fk_staff_id    ON payment(staff_id);
CREATE INDEX idx_fk_customer_id ON payment(customer_id);

-- Keep payment.last_update current on insert and update.
CREATE TRIGGER payment_trigger_ai AFTER INSERT ON payment
BEGIN
  UPDATE payment SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
END;

CREATE TRIGGER payment_trigger_au AFTER UPDATE ON payment
BEGIN
  UPDATE payment SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
END;
475
+
476
--
-- Table structure for table rental
--

CREATE TABLE rental (
  rental_id    INT       NOT NULL,
  rental_date  TIMESTAMP NOT NULL,
  inventory_id INT       NOT NULL,
  customer_id  INT       NOT NULL,
  return_date  TIMESTAMP DEFAULT NULL,
  staff_id     SMALLINT  NOT NULL,
  last_update  TIMESTAMP NOT NULL,
  PRIMARY KEY (rental_id),
  CONSTRAINT fk_rental_staff     FOREIGN KEY (staff_id)     REFERENCES staff (staff_id),
  CONSTRAINT fk_rental_inventory FOREIGN KEY (inventory_id) REFERENCES inventory (inventory_id),
  CONSTRAINT fk_rental_customer  FOREIGN KEY (customer_id)  REFERENCES customer (customer_id)
);

CREATE INDEX idx_rental_fk_inventory_id ON rental(inventory_id);
CREATE INDEX idx_rental_fk_customer_id  ON rental(customer_id);
CREATE INDEX idx_rental_fk_staff_id     ON rental(staff_id);

-- A given inventory item can be rented by a given customer at most once
-- per instant.
CREATE UNIQUE INDEX idx_rental_uq ON rental (rental_date, inventory_id, customer_id);

-- Keep rental.last_update current on insert and update.
CREATE TRIGGER rental_trigger_ai AFTER INSERT ON rental
BEGIN
  UPDATE rental SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
END;

CREATE TRIGGER rental_trigger_au AFTER UPDATE ON rental
BEGIN
  UPDATE rental SET last_update = DATETIME('NOW') WHERE rowid = new.rowid;
END;
510
--
-- View structure for view customer_list
--
-- One row per customer with a flattened mailing address and an
-- 'active'/'' notes flag.

CREATE VIEW customer_list
AS
SELECT cu.customer_id AS ID,
       cu.first_name||' '||cu.last_name AS name,
       a.address AS address,
       a.postal_code AS zip_code,
       a.phone AS phone,
       city.city AS city,
       country.country AS country,
       case when cu.active=1 then 'active' else '' end AS notes,
       cu.store_id AS SID
FROM customer AS cu
JOIN address AS a ON cu.address_id = a.address_id
JOIN city ON a.city_id = city.city_id
JOIN country ON city.country_id = country.country_id;
528
--
-- View structure for view film_list
--
-- One row per (film, category, actor) combination; films repeat once per
-- credited actor.

CREATE VIEW film_list
AS
SELECT film.film_id AS FID,
       film.title AS title,
       film.description AS description,
       category.name AS category,
       film.rental_rate AS price,
       film.length AS length,
       film.rating AS rating,
       actor.first_name||' '||actor.last_name AS actors
FROM category
LEFT JOIN film_category ON category.category_id = film_category.category_id
LEFT JOIN film ON film_category.film_id = film.film_id
JOIN film_actor ON film.film_id = film_actor.film_id
JOIN actor ON film_actor.actor_id = actor.actor_id;
546
+
547
--
-- View structure for view staff_list
--
-- One row per staff member with a flattened mailing address.

CREATE VIEW staff_list
AS
SELECT s.staff_id AS ID,
       s.first_name||' '||s.last_name AS name,
       a.address AS address,
       a.postal_code AS zip_code,
       a.phone AS phone,
       city.city AS city,
       country.country AS country,
       s.store_id AS SID
FROM staff AS s
JOIN address AS a ON s.address_id = a.address_id
JOIN city ON a.city_id = city.city_id
JOIN country ON city.country_id = country.country_id;
564
--
-- View structure for view sales_by_store
--
-- Total payment amount per store, labelled with the store's city/country
-- and its manager's name.

CREATE VIEW sales_by_store
AS
SELECT
  s.store_id
  ,c.city||','||cy.country AS store
  ,m.first_name||' '||m.last_name AS manager
  ,SUM(p.amount) AS total_sales
FROM payment AS p
INNER JOIN rental AS r ON p.rental_id = r.rental_id
INNER JOIN inventory AS i ON r.inventory_id = i.inventory_id
INNER JOIN store AS s ON i.store_id = s.store_id
INNER JOIN address AS a ON s.address_id = a.address_id
INNER JOIN city AS c ON a.city_id = c.city_id
INNER JOIN country AS cy ON c.country_id = cy.country_id
INNER JOIN staff AS m ON s.manager_staff_id = m.staff_id
GROUP BY
  s.store_id
  , c.city||','||cy.country
  , m.first_name||' '||m.last_name;
588
--
-- View structure for view sales_by_film_category
--
-- Note that total sales will add up to >100% because
-- some titles belong to more than 1 category
--

CREATE VIEW sales_by_film_category
AS
SELECT
  c.name AS category
  , SUM(p.amount) AS total_sales
FROM payment AS p
INNER JOIN rental AS r ON p.rental_id = r.rental_id
INNER JOIN inventory AS i ON r.inventory_id = i.inventory_id
INNER JOIN film AS f ON i.film_id = f.film_id
INNER JOIN film_category AS fc ON f.film_id = fc.film_id
INNER JOIN category AS c ON fc.category_id = c.category_id
GROUP BY c.name;
608
+
609
+ --
610
+ -- View structure for view actor_info
611
+ --
612
+
613
+ /*
614
+ CREATE VIEW actor_info
615
+ AS
616
+ SELECT
617
+ a.actor_id,
618
+ a.first_name,
619
+ a.last_name,
620
+ GROUP_CONCAT(DISTINCT CONCAT(c.name, ': ',
621
+ (SELECT GROUP_CONCAT(f.title ORDER BY f.title SEPARATOR ', ')
622
+ FROM sakila.film f
623
+ INNER JOIN sakila.film_category fc
624
+ ON f.film_id = fc.film_id
625
+ INNER JOIN sakila.film_actor fa
626
+ ON f.film_id = fa.film_id
627
+ WHERE fc.category_id = c.category_id
628
+ AND fa.actor_id = a.actor_id
629
+ )
630
+ )
631
+ ORDER BY c.name SEPARATOR '; ')
632
+ AS film_info
633
+ FROM sakila.actor a
634
+ LEFT JOIN sakila.film_actor fa
635
+ ON a.actor_id = fa.actor_id
636
+ LEFT JOIN sakila.film_category fc
637
+ ON fa.film_id = fc.film_id
638
+ LEFT JOIN sakila.category c
639
+ ON fc.category_id = c.category_id
640
+ GROUP BY a.actor_id, a.first_name, a.last_name;
641
+ */
642
+
643
-- TODO: port the remaining Sakila stored procedures to this SQLite schema
-- TODO: port the remaining Sakila triggers to this SQLite schema
package-lock.json ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
+ {
2
+ "name": "backend",
3
+ "lockfileVersion": 3,
4
+ "requires": true,
5
+ "packages": {}
6
+ }
pyproject.toml ADDED
@@ -0,0 +1,25 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ [project]
2
+ name = "insight-copilot"
3
+ version = "0.1.0"
4
+ description = "A modular, open-source Co-Pilot app built with LangGraph and CopilotKit"
5
+ dependencies = [
6
+ "fastapi>=0.68.0",
7
+ "uvicorn>=0.15.0",
8
+ "sqlalchemy>=1.4.0",
9
+ "pandas>=1.3.0",
10
+ "langchain>=0.0.200",
11
+ "langgraph>=0.0.10",
12
+ "python-dotenv>=0.19.0",
13
+ "pydantic>=1.8.0",
14
+ ]
15
+
16
+ [build-system]
17
+ requires = ["hatchling"]
18
+ build-backend = "hatchling.build"
19
+
20
+ [tool.hatch.build.targets.wheel]
21
+ packages = ["app"]
22
+
23
+ [tool.ruff]
24
+ line-length = 88
25
+ target-version = "py39"
requirements.txt ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
1
+ fastapi==0.115.12
2
+ uvicorn==0.34.2
3
+ sqlalchemy==2.0.40
4
+ pandas==2.2.3
5
+ langchain==0.3.25
6
+ langgraph==0.4.3
7
+ python-dotenv==1.1.0
8
+ pydantic==2.11.4
9
+ copilotkit==0.1.45
10
+ langchain-openai==0.3.16
11
+ ipython==9.2.0