chuckfinca committed on
Commit
929cde0
·
1 Parent(s): 1550049

feat(recommender): Implement generative synthesis for educator-friendly output

Browse files

This commit completes the core Proof-of-Concept by implementing the final Generation step of the RAG pipeline. The system now moves beyond simple document retrieval to provide a fully synthesized, actionable recommendation tailored for educators.

This fulfills the critical project requirement that the output must be clear and directly usable, not just a list of raw search results. This work completes the objectives outlined in Phase 3 of the implementation plan.

Key Changes:
- **Added Generative Synthesis**: Introduced a `generate_recommendation_summary` function in `src/fot_recommender/rag_pipeline.py`. This function leverages the Google Gemini API to process the top-k retrieved document chunks and synthesize them into a coherent, practical recommendation.
- **Created Prompt Templates**: Added a new `src/fot_recommender/prompts.py` module to store and manage persona-specific prompts. The initial implementation includes a detailed prompt for the 'teacher' persona, designed to produce professional and encouraging output.
- **Integrated into Main Application**: Refactored `src/fot_recommender/main.py` to call the new synthesis function. The application now prints a final, formatted recommendation, including the synthesized advice and the source documents used as evidence.
- **Updated Dependencies**: Added `google-generativeai` and `python-dotenv` to `pyproject.toml` to support the new generative capabilities.

Docs:
- Updated the implementation plan document to reflect the completion of Phase 3 tasks.

pyproject.toml CHANGED
@@ -23,6 +23,8 @@ dependencies = [
23
  "sentence-transformers",
24
  "faiss-cpu",
25
  "transformers",
 
 
26
  ]
27
 
28
  [project.scripts]
@@ -42,4 +44,15 @@ dev = [
42
  where = ["src"]
43
 
44
  [tool.setuptools.package-dir]
45
- "" = "src"
 
 
 
 
 
 
 
 
 
 
 
 
23
  "sentence-transformers",
24
  "faiss-cpu",
25
  "transformers",
26
+ "google-generativeai",
27
+ "python-dotenv",
28
  ]
29
 
30
  [project.scripts]
 
44
  where = ["src"]
45
 
46
  [tool.setuptools.package-dir]
47
+ "" = "src"
48
+
49
+ [tool.ruff.lint]
50
+ # Add any specific rules you want to enforce here in the future.
51
+ # For now, we will just define what to exclude.
52
+
53
+ exclude = [
54
+ ".venv",
55
+ "*/__pycache__/*", # Also excludes pycache within subdirectories
56
+ "src/fot_intervention_recommender.egg-info",
57
+ "*.ipynb", # <-- THIS IS THE KEY LINE TO EXCLUDE ALL NOTEBOOKS
58
+ ]
scripts/build_knowledge_base.py CHANGED
@@ -7,8 +7,9 @@ from pathlib import Path
7
  project_root = Path(__file__).parent.parent
8
  sys.path.append(str(project_root))
9
 
10
- from src.fot_recommender.config import RAW_KB_PATH, PROCESSED_DATA_DIR
11
- from src.fot_recommender.semantic_chunker import chunk_by_concept
 
12
 
13
  def build():
14
  """
@@ -31,7 +32,7 @@ def build():
31
  except FileNotFoundError:
32
  print(f"ERROR: Raw knowledge base file not found at {RAW_KB_PATH}. Halting.")
33
  return
34
-
35
  print(f"Loaded {len(raw_kb)} raw entries.")
36
 
37
  # 2. Process and chunk the knowledge base using the existing chunker
@@ -41,10 +42,10 @@ def build():
41
 
42
  # 3. Save the final chunked file
43
  print(f"Saving final chunked knowledge base to: {final_chunks_path}")
44
-
45
  # Ensure the 'processed' directory exists before trying to write to it
46
  PROCESSED_DATA_DIR.mkdir(parents=True, exist_ok=True)
47
-
48
  with open(final_chunks_path, "w", encoding="utf-8") as f:
49
  # We use indent=4 to make the final JSON file human-readable,
50
  # which is extremely helpful for debugging and verification.
@@ -53,5 +54,6 @@ def build():
53
  print("\n✅ Success! The final knowledge base is built and ready.")
54
  print("You can now run the main application.")
55
 
 
56
  if __name__ == "__main__":
57
- build()
 
7
  project_root = Path(__file__).parent.parent
8
  sys.path.append(str(project_root))
9
 
10
+ from src.fot_recommender.config import RAW_KB_PATH, PROCESSED_DATA_DIR # noqa: E402
11
+ from src.fot_recommender.semantic_chunker import chunk_by_concept # noqa: E402
12
+
13
 
14
  def build():
15
  """
 
32
  except FileNotFoundError:
33
  print(f"ERROR: Raw knowledge base file not found at {RAW_KB_PATH}. Halting.")
34
  return
35
+
36
  print(f"Loaded {len(raw_kb)} raw entries.")
37
 
38
  # 2. Process and chunk the knowledge base using the existing chunker
 
42
 
43
  # 3. Save the final chunked file
44
  print(f"Saving final chunked knowledge base to: {final_chunks_path}")
45
+
46
  # Ensure the 'processed' directory exists before trying to write to it
47
  PROCESSED_DATA_DIR.mkdir(parents=True, exist_ok=True)
48
+
49
  with open(final_chunks_path, "w", encoding="utf-8") as f:
50
  # We use indent=4 to make the final JSON file human-readable,
51
  # which is extremely helpful for debugging and verification.
 
54
  print("\n✅ Success! The final knowledge base is built and ready.")
55
  print("You can now run the main application.")
56
 
57
+
58
  if __name__ == "__main__":
59
+ build()
src/fot_recommender/__init__.py CHANGED
@@ -1,3 +1,3 @@
1
- from .main import main
2
 
3
  __all__ = ["main"]
 
1
+ from fot_recommender.main import main
2
 
3
  __all__ = ["main"]
src/fot_recommender/main.py CHANGED
@@ -1,10 +1,11 @@
1
- from .config import PROCESSED_DATA_DIR
2
- from .rag_pipeline import (
3
  load_knowledge_base,
4
  initialize_embedding_model,
5
  create_embeddings,
6
  create_vector_db,
7
  search_interventions,
 
8
  )
9
 
10
  # --- Sample Student Profile from Project Description ---
@@ -68,26 +69,32 @@ def main():
68
  model=embedding_model,
69
  index=vector_db,
70
  knowledge_base=knowledge_base_chunks,
 
71
  k=3,
72
  )
73
 
74
- print("\n--- Top 3 Recommended Intervention Chunks ---")
75
- for i, (chunk, score) in enumerate(top_interventions):
76
- print(f"\n--- Recommendation {i + 1} (Score: {score:.4f}) ---")
77
- print(f"Title: {chunk['title']}")
78
- print(f"Source: {chunk['source_document']} ({chunk['fot_pages']})")
79
- # To keep the output clean, we'll show the first 300 chars of the content
80
- print(f"Content Snippet: {chunk['original_content'][:300]}...")
81
 
82
- print("-" * 50)
83
- print("\n✅ PHASE 2 (RAG Pipeline Implementation) is complete!")
84
- print(
85
- "The system can now retrieve relevant interventions based on a student narrative."
86
- )
87
- print(
88
- "\nNext step: Phase 3 - System Integration & Testing (Formatting the final output for educators)."
89
  )
90
 
 
 
 
 
 
 
 
 
 
 
 
 
 
91
 
92
  if __name__ == "__main__":
93
  main()
 
1
+ from fot_recommender.config import PROCESSED_DATA_DIR
2
+ from fot_recommender.rag_pipeline import (
3
  load_knowledge_base,
4
  initialize_embedding_model,
5
  create_embeddings,
6
  create_vector_db,
7
  search_interventions,
8
+ generate_recommendation_summary
9
  )
10
 
11
  # --- Sample Student Profile from Project Description ---
 
69
  model=embedding_model,
70
  index=vector_db,
71
  knowledge_base=knowledge_base_chunks,
72
+ min_similarity_score=0.4,
73
  k=3,
74
  )
75
 
76
+ if not top_interventions:
77
+ print("Could not find relevant interventions for the student.")
78
+ return
 
 
 
 
79
 
80
+ # --- 4. Generate Synthesized Recommendation (for 'teacher' persona) ---
81
+ synthesized_recommendation = generate_recommendation_summary(
82
+ top_interventions, student_query, persona="teacher"
 
 
 
 
83
  )
84
 
85
+ # --- 5. Display Final Output ---
86
+ print("\n" + "="*50)
87
+ print(" FINAL SYNTHESIZED RECOMMENDATION FOR EDUCATOR")
88
+ print("="*50 + "\n")
89
+ print(synthesized_recommendation)
90
+
91
+ print("\n" + "-"*50)
92
+ print("Evidence retrieved from the following sources:")
93
+ for chunk, score in top_interventions:
94
+ print(f"- {chunk['title']} (Source: {chunk['source_document']}, Relevance: {score:.2f})")
95
+
96
+ print("\n\n✅ Full RAG process complete!")
97
+
98
 
99
  if __name__ == "__main__":
100
  main()
src/fot_recommender/prompts.py ADDED
@@ -0,0 +1,37 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ PROMPT_TEMPLATES = {
2
+ "teacher": """
3
+ You are an expert educational strategist AI, a supportive co-pilot for high school teachers. Your tone is professional, encouraging, and practical.
4
+
5
+ **Student Profile:**
6
+ {student_narrative}
7
+
8
+ **Evidence-Based Interventions:**
9
+ Based on the student's profile, the following intervention strategies have been identified from our knowledge base:
10
+
11
+ --- BEGIN CONTEXT ---
12
+ {context}
13
+ --- END CONTEXT ---
14
+
15
+ **Your Task:**
16
+ Synthesize the provided context to write a concise, actionable recommendation for the student's teacher.
17
+
18
+ **Instructions:**
19
+ 1. Start with a brief summary of the student's key challenges.
20
+ 2. Recommend 2-3 concrete, actionable strategies derived *only* from the provided context.
21
+ 3. For each strategy, briefly explain *why* it is relevant to this student, citing the core ideas from the sources.
22
+ 4. Do not invent information. Ground your entire response in the provided context.
23
+ 5. Format the output clearly using Markdown for readability.
24
+ """,
25
+ "parent": """
26
+ # THIS IS A PLACEHOLDER PROMPT. A detailed prompt would be developed next.
27
+ # Synthesize the context into simple, non-jargon language for a parent.
28
+ # Student: {student_narrative}
29
+ # Context: {context}
30
+ """,
31
+ "principal": """
32
+ # THIS IS A PLACEHOLDER PROMPT. A detailed prompt would be developed next.
33
+ # Synthesize the context into a strategic overview for a principal.
34
+ # Student: {student_narrative}
35
+ # Context: {context}
36
+ """,
37
+ }
src/fot_recommender/rag_pipeline.py CHANGED
@@ -1,8 +1,12 @@
1
  import faiss # type: ignore
2
  import json
3
  import numpy as np
 
 
 
4
  from sentence_transformers import SentenceTransformer
5
  from typing import List, Dict, Any, Tuple
 
6
 
7
 
8
  def load_knowledge_base(path: str) -> List[Dict[str, Any]]:
@@ -72,6 +76,7 @@ def search_interventions(
72
  index: faiss.Index,
73
  knowledge_base: List[Dict[str, Any]],
74
  k: int = 3,
 
75
  ) -> List[Tuple[Dict[str, Any], float]]:
76
  """
77
  Performs a semantic search to find the most relevant interventions.
@@ -92,5 +97,53 @@ def search_interventions(
92
  if i != -1: # FAISS returns -1 for no result
93
  results.append((knowledge_base[i], score))
94
 
95
- print(f"Found {len(results)} relevant interventions.")
96
- return results
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
  import faiss # type: ignore
2
  import json
3
  import numpy as np
4
+ import google.generativeai as genai
5
+ import os
6
+ from dotenv import load_dotenv
7
  from sentence_transformers import SentenceTransformer
8
  from typing import List, Dict, Any, Tuple
9
+ from fot_recommender.prompts import PROMPT_TEMPLATES
10
 
11
 
12
  def load_knowledge_base(path: str) -> List[Dict[str, Any]]:
 
76
  index: faiss.Index,
77
  knowledge_base: List[Dict[str, Any]],
78
  k: int = 3,
79
+ min_similarity_score: float = 0.4,
80
  ) -> List[Tuple[Dict[str, Any], float]]:
81
  """
82
  Performs a semantic search to find the most relevant interventions.
 
97
  if i != -1: # FAISS returns -1 for no result
98
  results.append((knowledge_base[i], score))
99
 
100
+ filtered_results = [
101
+ (chunk, score) for chunk, score in results if score >= min_similarity_score
102
+ ]
103
+
104
+ print(f"Found {len(filtered_results)} relevant interventions.")
105
+ return filtered_results
106
+
107
+
108
+ def generate_recommendation_summary(
109
+ retrieved_chunks: List[Tuple[Dict[str, Any], float]],
110
+ student_narrative: str,
111
+ persona: str = "teacher",
112
+ ) -> str:
113
+ """
114
+ Generates a synthesized recommendation using the Google Gemini API,
115
+ tailored to a specific persona.
116
+ """
117
+ load_dotenv()
118
+ api_key = os.getenv("GOOGLE_API_KEY")
119
+ if not api_key:
120
+ return "ERROR: GOOGLE_API_KEY not found. Please create a .env file and add your key."
121
+
122
+ genai.configure(api_key=api_key) # type: ignore
123
+
124
+ if persona not in PROMPT_TEMPLATES:
125
+ return f"ERROR: Persona '{persona}' is not a valid choice."
126
+
127
+ # 1. Prepare the context from retrieved chunks for the prompt
128
+ context = ""
129
+ for i, (chunk, _) in enumerate(retrieved_chunks):
130
+ context += f"--- Intervention Chunk {i + 1} ---\n"
131
+ context += f"Title: {chunk['title']}\n"
132
+ context += f"Content: {chunk['original_content']}\n"
133
+ context += f"(Source Document: {chunk['source_document']})\n\n"
134
+
135
+ # 2. Select and format the persona-specific prompt
136
+ prompt_template = PROMPT_TEMPLATES[persona]
137
+ prompt = prompt_template.format(
138
+ student_narrative=student_narrative, context=context
139
+ )
140
+
141
+ # 3. Call the Gemini API
142
+ try:
143
+ print(f"\nSynthesizing recommendation for persona: '{persona}' using Gemini...")
144
+ model = genai.GenerativeModel("gemini-1.5-flash-latest") # type: ignore
145
+ response = model.generate_content(prompt)
146
+ print("Synthesis complete.")
147
+ return response.text
148
+ except Exception as e:
149
+ return f"An error occurred while calling the Gemini API: {e}"
src/fot_recommender/semantic_chunker.py CHANGED
@@ -1,6 +1,7 @@
1
  import collections
2
  from typing import List, Dict, Any
3
 
 
4
  def _serialize_table_to_markdown(table_data: List[Dict[str, Any]]) -> str:
5
  """
6
  Converts a list of dictionaries (representing a table) into a Markdown string.
@@ -11,28 +12,31 @@ def _serialize_table_to_markdown(table_data: List[Dict[str, Any]]) -> str:
11
 
12
  # Extract headers from the first row
13
  headers = table_data[0].keys()
14
-
15
  # Create Markdown header row and separator row
16
  md_header = "| " + " | ".join(headers) + " |"
17
- md_separator = "|-" + "-|-".join(['-' * len(h) for h in headers]) + "-|" # Basic separator
 
 
18
 
19
  # Create Markdown data rows
20
  md_rows = [md_header, md_separator]
21
  for row_dict in table_data:
22
  # Ensure all keys from headers are present, even if with None values
23
- row_values = [str(row_dict.get(header, '')) for header in headers]
24
  md_rows.append("| " + " | ".join(row_values) + " |")
25
-
26
  return "\n".join(md_rows)
27
 
 
28
  def chunk_by_concept(raw_knowledge_base: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
29
  """
30
- Groups items from the raw knowledge base by a composite key of
31
  (source_document, concept) to create high-quality, coherent semantic chunks.
32
  Includes table data serialization for embedding.
33
 
34
  Args:
35
- raw_knowledge_base: A list of dictionaries, where each dict represents a page
36
  or a piece of content from the source documents.
37
 
38
  Returns:
@@ -40,39 +44,43 @@ def chunk_by_concept(raw_knowledge_base: List[Dict[str, Any]]) -> List[Dict[str,
40
  """
41
  grouped_by_source_and_concept = collections.defaultdict(list)
42
  for item in raw_knowledge_base:
43
- composite_key = (item['source_document'], item['concept'])
44
  grouped_by_source_and_concept[composite_key].append(item)
45
 
46
  final_chunks = []
47
  for (source_doc, concept), items in grouped_by_source_and_concept.items():
48
- items.sort(key=lambda x: x.get('absolute_page', 0))
49
 
50
  # Collect all content and any table data
51
  all_content_parts = []
52
  for item in items:
53
- if item.get('content'):
54
- all_content_parts.append(item['content'])
55
- if item.get('table_data'):
56
  # Serialize table data to Markdown and add it as a content part
57
- table_md = _serialize_table_to_markdown(item['table_data'])
58
- if table_md: # Only add if serialization produced something
59
  all_content_parts.append(f"\nExample Table:\n{table_md}")
60
-
61
  combined_content = "\n\n".join(all_content_parts).strip()
62
-
63
- pages = sorted(list(set(item['absolute_page'] for item in items if 'absolute_page' in item)))
 
 
 
 
64
  page_str = f"Pages: {', '.join(map(str, pages))}" if pages else "N/A"
65
-
66
  # Prepend title to content for embedding
67
  content_for_embedding = f"Title: {concept}. Content: {combined_content}"
68
-
69
  final_chunk = {
70
  "title": concept,
71
  "source_document": source_doc,
72
  "fot_pages": page_str,
73
  "content_for_embedding": content_for_embedding,
74
- "original_content": combined_content # Keep original for potential display
75
  }
76
  final_chunks.append(final_chunk)
77
-
78
- return final_chunks
 
1
  import collections
2
  from typing import List, Dict, Any
3
 
4
+
5
  def _serialize_table_to_markdown(table_data: List[Dict[str, Any]]) -> str:
6
  """
7
  Converts a list of dictionaries (representing a table) into a Markdown string.
 
12
 
13
  # Extract headers from the first row
14
  headers = table_data[0].keys()
15
+
16
  # Create Markdown header row and separator row
17
  md_header = "| " + " | ".join(headers) + " |"
18
+ md_separator = (
19
+ "|-" + "-|-".join(["-" * len(h) for h in headers]) + "-|"
20
+ ) # Basic separator
21
 
22
  # Create Markdown data rows
23
  md_rows = [md_header, md_separator]
24
  for row_dict in table_data:
25
  # Ensure all keys from headers are present, even if with None values
26
+ row_values = [str(row_dict.get(header, "")) for header in headers]
27
  md_rows.append("| " + " | ".join(row_values) + " |")
28
+
29
  return "\n".join(md_rows)
30
 
31
+
32
  def chunk_by_concept(raw_knowledge_base: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
33
  """
34
+ Groups items from the raw knowledge base by a composite key of
35
  (source_document, concept) to create high-quality, coherent semantic chunks.
36
  Includes table data serialization for embedding.
37
 
38
  Args:
39
+ raw_knowledge_base: A list of dictionaries, where each dict represents a page
40
  or a piece of content from the source documents.
41
 
42
  Returns:
 
44
  """
45
  grouped_by_source_and_concept = collections.defaultdict(list)
46
  for item in raw_knowledge_base:
47
+ composite_key = (item["source_document"], item["concept"])
48
  grouped_by_source_and_concept[composite_key].append(item)
49
 
50
  final_chunks = []
51
  for (source_doc, concept), items in grouped_by_source_and_concept.items():
52
+ items.sort(key=lambda x: x.get("absolute_page", 0))
53
 
54
  # Collect all content and any table data
55
  all_content_parts = []
56
  for item in items:
57
+ if item.get("content"):
58
+ all_content_parts.append(item["content"])
59
+ if item.get("table_data"):
60
  # Serialize table data to Markdown and add it as a content part
61
+ table_md = _serialize_table_to_markdown(item["table_data"])
62
+ if table_md: # Only add if serialization produced something
63
  all_content_parts.append(f"\nExample Table:\n{table_md}")
64
+
65
  combined_content = "\n\n".join(all_content_parts).strip()
66
+
67
+ pages = sorted(
68
+ list(
69
+ set(item["absolute_page"] for item in items if "absolute_page" in item)
70
+ )
71
+ )
72
  page_str = f"Pages: {', '.join(map(str, pages))}" if pages else "N/A"
73
+
74
  # Prepend title to content for embedding
75
  content_for_embedding = f"Title: {concept}. Content: {combined_content}"
76
+
77
  final_chunk = {
78
  "title": concept,
79
  "source_document": source_doc,
80
  "fot_pages": page_str,
81
  "content_for_embedding": content_for_embedding,
82
+ "original_content": combined_content, # Keep original for potential display
83
  }
84
  final_chunks.append(final_chunk)
85
+
86
+ return final_chunks
src/fot_recommender/utils.py CHANGED
@@ -1,5 +1,6 @@
1
  from typing import List, Dict, Any, Tuple
2
 
 
3
  def display_recommendations(results: List[Tuple[Dict[str, Any], float]]):
4
  """
5
  A helper function to neatly print the results of a semantic search.
@@ -18,9 +19,9 @@ def display_recommendations(results: List[Tuple[Dict[str, Any], float]]):
18
  print(f"\n--- Recommendation {i + 1} (Similarity Score: {score:.4f}) ---")
19
  print(f" Title: {chunk['title']}")
20
  print(f" Source: {chunk['source_document']} ({chunk['fot_pages']})")
21
-
22
  # Indent the content for better readability
23
- content = chunk['original_content']
24
  indented_content = "\n ".join(content.splitlines())
25
- print(f" \n Content Snippet:\n \"{indented_content[:500]}...\"")
26
  print("-" * 50)
 
1
  from typing import List, Dict, Any, Tuple
2
 
3
+
4
  def display_recommendations(results: List[Tuple[Dict[str, Any], float]]):
5
  """
6
  A helper function to neatly print the results of a semantic search.
 
19
  print(f"\n--- Recommendation {i + 1} (Similarity Score: {score:.4f}) ---")
20
  print(f" Title: {chunk['title']}")
21
  print(f" Source: {chunk['source_document']} ({chunk['fot_pages']})")
22
+
23
  # Indent the content for better readability
24
+ content = chunk["original_content"]
25
  indented_content = "\n ".join(content.splitlines())
26
+ print(f' \n Content Snippet:\n "{indented_content[:500]}..."')
27
  print("-" * 50)
uv.lock CHANGED
@@ -3,7 +3,8 @@ revision = 2
3
  requires-python = ">=3.12"
4
  resolution-markers = [
5
  "python_full_version >= '3.14'",
6
- "python_full_version < '3.14'",
 
7
  ]
8
 
9
  [[package]]
@@ -80,7 +81,8 @@ name = "argon2-cffi-bindings"
80
  version = "25.1.0"
81
  source = { registry = "https://pypi.org/simple" }
82
  resolution-markers = [
83
- "python_full_version < '3.14'",
 
84
  ]
85
  dependencies = [
86
  { name = "cffi", marker = "python_full_version < '3.14'" },
@@ -212,6 +214,15 @@ css = [
212
  { name = "tinycss2" },
213
  ]
214
 
 
 
 
 
 
 
 
 
 
215
  [[package]]
216
  name = "certifi"
217
  version = "2025.8.3"
@@ -415,8 +426,10 @@ version = "0.1.0"
415
  source = { editable = "." }
416
  dependencies = [
417
  { name = "faiss-cpu" },
 
418
  { name = "langchain" },
419
  { name = "numpy" },
 
420
  { name = "sentence-transformers" },
421
  { name = "setuptools" },
422
  { name = "torch" },
@@ -437,12 +450,14 @@ dev = [
437
  requires-dist = [
438
  { name = "black", marker = "extra == 'dev'", specifier = ">=25.1.0" },
439
  { name = "faiss-cpu" },
 
440
  { name = "jupyterlab", marker = "extra == 'dev'", specifier = ">=4.0" },
441
  { name = "langchain" },
442
  { name = "mypy", marker = "extra == 'dev'", specifier = ">=1.16.1" },
443
  { name = "notebook", marker = "extra == 'dev'", specifier = ">=7.0" },
444
  { name = "numpy", specifier = "<2.0" },
445
  { name = "pytest", marker = "extra == 'dev'", specifier = ">=8.4.1" },
 
446
  { name = "ruff", marker = "extra == 'dev'", specifier = ">=0.12.2" },
447
  { name = "sentence-transformers" },
448
  { name = "setuptools", specifier = ">=80.9.0" },
@@ -469,6 +484,116 @@ wheels = [
469
  { url = "https://files.pythonhosted.org/packages/2f/e0/014d5d9d7a4564cf1c40b5039bc882db69fd881111e03ab3657ac0b218e2/fsspec-2025.7.0-py3-none-any.whl", hash = "sha256:8b012e39f63c7d5f10474de957f3ab793b47b45ae7d39f2fb735f8bbe25c0e21", size = 199597, upload-time = "2025-07-15T16:05:19.529Z" },
470
  ]
471
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
472
  [[package]]
473
  name = "greenlet"
474
  version = "3.2.3"
@@ -502,6 +627,48 @@ wheels = [
502
  { url = "https://files.pythonhosted.org/packages/5c/4f/aab73ecaa6b3086a4c89863d94cf26fa84cbff63f52ce9bc4342b3087a06/greenlet-3.2.3-cp314-cp314-win_amd64.whl", hash = "sha256:8c47aae8fbbfcf82cc13327ae802ba13c9c36753b67e760023fd116bc124a62a", size = 301236, upload-time = "2025-06-05T16:15:20.111Z" },
503
  ]
504
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
505
  [[package]]
506
  name = "h11"
507
  version = "0.16.0"
@@ -539,6 +706,18 @@ wheels = [
539
  { url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784, upload-time = "2025-04-24T22:06:20.566Z" },
540
  ]
541
 
 
 
 
 
 
 
 
 
 
 
 
 
542
  [[package]]
543
  name = "httpx"
544
  version = "0.28.1"
@@ -1537,6 +1716,32 @@ wheels = [
1537
  { url = "https://files.pythonhosted.org/packages/ce/4f/5249960887b1fbe561d9ff265496d170b55a735b76724f10ef19f9e40716/prompt_toolkit-3.0.51-py3-none-any.whl", hash = "sha256:52742911fde84e2d423e2f9a4cf1de7d7ac4e51958f648d9540e0fb8db077b07", size = 387810, upload-time = "2025-04-15T09:18:44.753Z" },
1538
  ]
1539
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1540
  [[package]]
1541
  name = "psutil"
1542
  version = "7.0.0"
@@ -1570,6 +1775,27 @@ wheels = [
1570
  { url = "https://files.pythonhosted.org/packages/8e/37/efad0257dc6e593a18957422533ff0f87ede7c9c6ea010a2177d738fb82f/pure_eval-0.2.3-py3-none-any.whl", hash = "sha256:1db8e35b67b3d218d818ae653e27f06c3aa420901fa7b081ca98cbedc874e0d0", size = 11842, upload-time = "2024-07-21T12:58:20.04Z" },
1571
  ]
1572
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1573
  [[package]]
1574
  name = "pycparser"
1575
  version = "2.22"
@@ -1645,6 +1871,15 @@ wheels = [
1645
  { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" },
1646
  ]
1647
 
 
 
 
 
 
 
 
 
 
1648
  [[package]]
1649
  name = "pytest"
1650
  version = "8.4.1"
@@ -1673,6 +1908,15 @@ wheels = [
1673
  { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892, upload-time = "2024-03-01T18:36:18.57Z" },
1674
  ]
1675
 
 
 
 
 
 
 
 
 
 
1676
  [[package]]
1677
  name = "python-json-logger"
1678
  version = "3.3.0"
@@ -1978,6 +2222,18 @@ wheels = [
1978
  { url = "https://files.pythonhosted.org/packages/75/04/5302cea1aa26d886d34cadbf2dc77d90d7737e576c0065f357b96dc7a1a6/rpds_py-0.26.0-cp314-cp314t-win_amd64.whl", hash = "sha256:f14440b9573a6f76b4ee4770c13f0b5921f71dde3b6fcb8dabbefd13b7fe05d7", size = 232821, upload-time = "2025-07-01T15:55:55.167Z" },
1979
  ]
1980
 
 
 
 
 
 
 
 
 
 
 
 
 
1981
  [[package]]
1982
  name = "ruff"
1983
  version = "0.12.7"
@@ -2429,6 +2685,15 @@ wheels = [
2429
  { url = "https://files.pythonhosted.org/packages/e7/00/3fca040d7cf8a32776d3d81a00c8ee7457e00f80c649f1e4a863c8321ae9/uri_template-1.3.0-py3-none-any.whl", hash = "sha256:a44a133ea12d44a0c0f06d7d42a52d71282e77e2f937d8abd5655b8d56fc1363", size = 11140, upload-time = "2023-06-21T01:49:03.467Z" },
2430
  ]
2431
 
 
 
 
 
 
 
 
 
 
2432
  [[package]]
2433
  name = "urllib3"
2434
  version = "2.5.0"
 
3
  requires-python = ">=3.12"
4
  resolution-markers = [
5
  "python_full_version >= '3.14'",
6
+ "python_full_version == '3.13.*'",
7
+ "python_full_version < '3.13'",
8
  ]
9
 
10
  [[package]]
 
81
  version = "25.1.0"
82
  source = { registry = "https://pypi.org/simple" }
83
  resolution-markers = [
84
+ "python_full_version == '3.13.*'",
85
+ "python_full_version < '3.13'",
86
  ]
87
  dependencies = [
88
  { name = "cffi", marker = "python_full_version < '3.14'" },
 
214
  { name = "tinycss2" },
215
  ]
216
 
217
+ [[package]]
218
+ name = "cachetools"
219
+ version = "5.5.2"
220
+ source = { registry = "https://pypi.org/simple" }
221
+ sdist = { url = "https://files.pythonhosted.org/packages/6c/81/3747dad6b14fa2cf53fcf10548cf5aea6913e96fab41a3c198676f8948a5/cachetools-5.5.2.tar.gz", hash = "sha256:1a661caa9175d26759571b2e19580f9d6393969e5dfca11fdb1f947a23e640d4", size = 28380, upload-time = "2025-02-20T21:01:19.524Z" }
222
+ wheels = [
223
+ { url = "https://files.pythonhosted.org/packages/72/76/20fa66124dbe6be5cafeb312ece67de6b61dd91a0247d1ea13db4ebb33c2/cachetools-5.5.2-py3-none-any.whl", hash = "sha256:d26a22bcc62eb95c3beabd9f1ee5e820d3d2704fe2967cbe350e20c8ffcd3f0a", size = 10080, upload-time = "2025-02-20T21:01:16.647Z" },
224
+ ]
225
+
226
  [[package]]
227
  name = "certifi"
228
  version = "2025.8.3"
 
426
  source = { editable = "." }
427
  dependencies = [
428
  { name = "faiss-cpu" },
429
+ { name = "google-generativeai" },
430
  { name = "langchain" },
431
  { name = "numpy" },
432
+ { name = "python-dotenv" },
433
  { name = "sentence-transformers" },
434
  { name = "setuptools" },
435
  { name = "torch" },
 
450
  requires-dist = [
451
  { name = "black", marker = "extra == 'dev'", specifier = ">=25.1.0" },
452
  { name = "faiss-cpu" },
453
+ { name = "google-generativeai" },
454
  { name = "jupyterlab", marker = "extra == 'dev'", specifier = ">=4.0" },
455
  { name = "langchain" },
456
  { name = "mypy", marker = "extra == 'dev'", specifier = ">=1.16.1" },
457
  { name = "notebook", marker = "extra == 'dev'", specifier = ">=7.0" },
458
  { name = "numpy", specifier = "<2.0" },
459
  { name = "pytest", marker = "extra == 'dev'", specifier = ">=8.4.1" },
460
+ { name = "python-dotenv" },
461
  { name = "ruff", marker = "extra == 'dev'", specifier = ">=0.12.2" },
462
  { name = "sentence-transformers" },
463
  { name = "setuptools", specifier = ">=80.9.0" },
 
484
  { url = "https://files.pythonhosted.org/packages/2f/e0/014d5d9d7a4564cf1c40b5039bc882db69fd881111e03ab3657ac0b218e2/fsspec-2025.7.0-py3-none-any.whl", hash = "sha256:8b012e39f63c7d5f10474de957f3ab793b47b45ae7d39f2fb735f8bbe25c0e21", size = 199597, upload-time = "2025-07-15T16:05:19.529Z" },
485
  ]
486
 
487
+ [[package]]
488
+ name = "google-ai-generativelanguage"
489
+ version = "0.6.15"
490
+ source = { registry = "https://pypi.org/simple" }
491
+ dependencies = [
492
+ { name = "google-api-core", extra = ["grpc"] },
493
+ { name = "google-auth" },
494
+ { name = "proto-plus" },
495
+ { name = "protobuf" },
496
+ ]
497
+ sdist = { url = "https://files.pythonhosted.org/packages/11/d1/48fe5d7a43d278e9f6b5ada810b0a3530bbeac7ed7fcbcd366f932f05316/google_ai_generativelanguage-0.6.15.tar.gz", hash = "sha256:8f6d9dc4c12b065fe2d0289026171acea5183ebf2d0b11cefe12f3821e159ec3", size = 1375443, upload-time = "2025-01-13T21:50:47.459Z" }
498
+ wheels = [
499
+ { url = "https://files.pythonhosted.org/packages/7c/a3/67b8a6ff5001a1d8864922f2d6488dc2a14367ceb651bc3f09a947f2f306/google_ai_generativelanguage-0.6.15-py3-none-any.whl", hash = "sha256:5a03ef86377aa184ffef3662ca28f19eeee158733e45d7947982eb953c6ebb6c", size = 1327356, upload-time = "2025-01-13T21:50:44.174Z" },
500
+ ]
501
+
502
+ [[package]]
503
+ name = "google-api-core"
504
+ version = "2.25.1"
505
+ source = { registry = "https://pypi.org/simple" }
506
+ dependencies = [
507
+ { name = "google-auth" },
508
+ { name = "googleapis-common-protos" },
509
+ { name = "proto-plus" },
510
+ { name = "protobuf" },
511
+ { name = "requests" },
512
+ ]
513
+ sdist = { url = "https://files.pythonhosted.org/packages/dc/21/e9d043e88222317afdbdb567165fdbc3b0aad90064c7e0c9eb0ad9955ad8/google_api_core-2.25.1.tar.gz", hash = "sha256:d2aaa0b13c78c61cb3f4282c464c046e45fbd75755683c9c525e6e8f7ed0a5e8", size = 165443, upload-time = "2025-06-12T20:52:20.439Z" }
514
+ wheels = [
515
+ { url = "https://files.pythonhosted.org/packages/14/4b/ead00905132820b623732b175d66354e9d3e69fcf2a5dcdab780664e7896/google_api_core-2.25.1-py3-none-any.whl", hash = "sha256:8a2a56c1fef82987a524371f99f3bd0143702fecc670c72e600c1cda6bf8dbb7", size = 160807, upload-time = "2025-06-12T20:52:19.334Z" },
516
+ ]
517
+
518
+ [package.optional-dependencies]
519
+ grpc = [
520
+ { name = "grpcio" },
521
+ { name = "grpcio-status" },
522
+ ]
523
+
524
+ [[package]]
525
+ name = "google-api-python-client"
526
+ version = "2.178.0"
527
+ source = { registry = "https://pypi.org/simple" }
528
+ dependencies = [
529
+ { name = "google-api-core" },
530
+ { name = "google-auth" },
531
+ { name = "google-auth-httplib2" },
532
+ { name = "httplib2" },
533
+ { name = "uritemplate" },
534
+ ]
535
+ sdist = { url = "https://files.pythonhosted.org/packages/99/98/916385a87d145a27661b630c480fadf9db32bb1ad9fb1b13e8dbcbe2af70/google_api_python_client-2.178.0.tar.gz", hash = "sha256:99cba921eb471bb5973b780c653ac54d96eef8a42f1b7375b7ab98f257a4414c", size = 13282628, upload-time = "2025-08-06T14:04:51.062Z" }
536
+ wheels = [
537
+ { url = "https://files.pythonhosted.org/packages/29/34/8ae31410a2d3f28b16b7135931133caf759d3aa0653f8397e344acec5a88/google_api_python_client-2.178.0-py3-none-any.whl", hash = "sha256:f420adcd050150ff1baefa817e96e1ffa16872744f53471cd34096612e580c34", size = 13809959, upload-time = "2025-08-06T14:04:47.94Z" },
538
+ ]
539
+
540
+ [[package]]
541
+ name = "google-auth"
542
+ version = "2.40.3"
543
+ source = { registry = "https://pypi.org/simple" }
544
+ dependencies = [
545
+ { name = "cachetools" },
546
+ { name = "pyasn1-modules" },
547
+ { name = "rsa" },
548
+ ]
549
+ sdist = { url = "https://files.pythonhosted.org/packages/9e/9b/e92ef23b84fa10a64ce4831390b7a4c2e53c0132568d99d4ae61d04c8855/google_auth-2.40.3.tar.gz", hash = "sha256:500c3a29adedeb36ea9cf24b8d10858e152f2412e3ca37829b3fa18e33d63b77", size = 281029, upload-time = "2025-06-04T18:04:57.577Z" }
550
+ wheels = [
551
+ { url = "https://files.pythonhosted.org/packages/17/63/b19553b658a1692443c62bd07e5868adaa0ad746a0751ba62c59568cd45b/google_auth-2.40.3-py2.py3-none-any.whl", hash = "sha256:1370d4593e86213563547f97a92752fc658456fe4514c809544f330fed45a7ca", size = 216137, upload-time = "2025-06-04T18:04:55.573Z" },
552
+ ]
553
+
554
+ [[package]]
555
+ name = "google-auth-httplib2"
556
+ version = "0.2.0"
557
+ source = { registry = "https://pypi.org/simple" }
558
+ dependencies = [
559
+ { name = "google-auth" },
560
+ { name = "httplib2" },
561
+ ]
562
+ sdist = { url = "https://files.pythonhosted.org/packages/56/be/217a598a818567b28e859ff087f347475c807a5649296fb5a817c58dacef/google-auth-httplib2-0.2.0.tar.gz", hash = "sha256:38aa7badf48f974f1eb9861794e9c0cb2a0511a4ec0679b1f886d108f5640e05", size = 10842, upload-time = "2023-12-12T17:40:30.722Z" }
563
+ wheels = [
564
+ { url = "https://files.pythonhosted.org/packages/be/8a/fe34d2f3f9470a27b01c9e76226965863f153d5fbe276f83608562e49c04/google_auth_httplib2-0.2.0-py2.py3-none-any.whl", hash = "sha256:b65a0a2123300dd71281a7bf6e64d65a0759287df52729bdd1ae2e47dc311a3d", size = 9253, upload-time = "2023-12-12T17:40:13.055Z" },
565
+ ]
566
+
567
+ [[package]]
568
+ name = "google-generativeai"
569
+ version = "0.8.5"
570
+ source = { registry = "https://pypi.org/simple" }
571
+ dependencies = [
572
+ { name = "google-ai-generativelanguage" },
573
+ { name = "google-api-core" },
574
+ { name = "google-api-python-client" },
575
+ { name = "google-auth" },
576
+ { name = "protobuf" },
577
+ { name = "pydantic" },
578
+ { name = "tqdm" },
579
+ { name = "typing-extensions" },
580
+ ]
581
+ wheels = [
582
+ { url = "https://files.pythonhosted.org/packages/6e/40/c42ff9ded9f09ec9392879a8e6538a00b2dc185e834a3392917626255419/google_generativeai-0.8.5-py3-none-any.whl", hash = "sha256:22b420817fb263f8ed520b33285f45976d5b21e904da32b80d4fd20c055123a2", size = 155427, upload-time = "2025-04-17T00:40:00.67Z" },
583
+ ]
584
+
585
+ [[package]]
586
+ name = "googleapis-common-protos"
587
+ version = "1.70.0"
588
+ source = { registry = "https://pypi.org/simple" }
589
+ dependencies = [
590
+ { name = "protobuf" },
591
+ ]
592
+ sdist = { url = "https://files.pythonhosted.org/packages/39/24/33db22342cf4a2ea27c9955e6713140fedd51e8b141b5ce5260897020f1a/googleapis_common_protos-1.70.0.tar.gz", hash = "sha256:0e1b44e0ea153e6594f9f394fef15193a68aaaea2d843f83e2742717ca753257", size = 145903, upload-time = "2025-04-14T10:17:02.924Z" }
593
+ wheels = [
594
+ { url = "https://files.pythonhosted.org/packages/86/f1/62a193f0227cf15a920390abe675f386dec35f7ae3ffe6da582d3ade42c7/googleapis_common_protos-1.70.0-py3-none-any.whl", hash = "sha256:b8bfcca8c25a2bb253e0e0b0adaf8c00773e5e6af6fd92397576680b807e0fd8", size = 294530, upload-time = "2025-04-14T10:17:01.271Z" },
595
+ ]
596
+
597
  [[package]]
598
  name = "greenlet"
599
  version = "3.2.3"
 
627
  { url = "https://files.pythonhosted.org/packages/5c/4f/aab73ecaa6b3086a4c89863d94cf26fa84cbff63f52ce9bc4342b3087a06/greenlet-3.2.3-cp314-cp314-win_amd64.whl", hash = "sha256:8c47aae8fbbfcf82cc13327ae802ba13c9c36753b67e760023fd116bc124a62a", size = 301236, upload-time = "2025-06-05T16:15:20.111Z" },
628
  ]
629
 
630
+ [[package]]
631
+ name = "grpcio"
632
+ version = "1.74.0"
633
+ source = { registry = "https://pypi.org/simple" }
634
+ sdist = { url = "https://files.pythonhosted.org/packages/38/b4/35feb8f7cab7239c5b94bd2db71abb3d6adb5f335ad8f131abb6060840b6/grpcio-1.74.0.tar.gz", hash = "sha256:80d1f4fbb35b0742d3e3d3bb654b7381cd5f015f8497279a1e9c21ba623e01b1", size = 12756048, upload-time = "2025-07-24T18:54:23.039Z" }
635
+ wheels = [
636
+ { url = "https://files.pythonhosted.org/packages/4c/5d/e504d5d5c4469823504f65687d6c8fb97b7f7bf0b34873b7598f1df24630/grpcio-1.74.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:8533e6e9c5bd630ca98062e3a1326249e6ada07d05acf191a77bc33f8948f3d8", size = 5445551, upload-time = "2025-07-24T18:53:23.641Z" },
637
+ { url = "https://files.pythonhosted.org/packages/43/01/730e37056f96f2f6ce9f17999af1556df62ee8dab7fa48bceeaab5fd3008/grpcio-1.74.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:2918948864fec2a11721d91568effffbe0a02b23ecd57f281391d986847982f6", size = 10979810, upload-time = "2025-07-24T18:53:25.349Z" },
638
+ { url = "https://files.pythonhosted.org/packages/79/3d/09fd100473ea5c47083889ca47ffd356576173ec134312f6aa0e13111dee/grpcio-1.74.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:60d2d48b0580e70d2e1954d0d19fa3c2e60dd7cbed826aca104fff518310d1c5", size = 5941946, upload-time = "2025-07-24T18:53:27.387Z" },
639
+ { url = "https://files.pythonhosted.org/packages/8a/99/12d2cca0a63c874c6d3d195629dcd85cdf5d6f98a30d8db44271f8a97b93/grpcio-1.74.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3601274bc0523f6dc07666c0e01682c94472402ac2fd1226fd96e079863bfa49", size = 6621763, upload-time = "2025-07-24T18:53:29.193Z" },
640
+ { url = "https://files.pythonhosted.org/packages/9d/2c/930b0e7a2f1029bbc193443c7bc4dc2a46fedb0203c8793dcd97081f1520/grpcio-1.74.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:176d60a5168d7948539def20b2a3adcce67d72454d9ae05969a2e73f3a0feee7", size = 6180664, upload-time = "2025-07-24T18:53:30.823Z" },
641
+ { url = "https://files.pythonhosted.org/packages/db/d5/ff8a2442180ad0867717e670f5ec42bfd8d38b92158ad6bcd864e6d4b1ed/grpcio-1.74.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e759f9e8bc908aaae0412642afe5416c9f983a80499448fcc7fab8692ae044c3", size = 6301083, upload-time = "2025-07-24T18:53:32.454Z" },
642
+ { url = "https://files.pythonhosted.org/packages/b0/ba/b361d390451a37ca118e4ec7dccec690422e05bc85fba2ec72b06cefec9f/grpcio-1.74.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:9e7c4389771855a92934b2846bd807fc25a3dfa820fd912fe6bd8136026b2707", size = 6994132, upload-time = "2025-07-24T18:53:34.506Z" },
643
+ { url = "https://files.pythonhosted.org/packages/3b/0c/3a5fa47d2437a44ced74141795ac0251bbddeae74bf81df3447edd767d27/grpcio-1.74.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:cce634b10aeab37010449124814b05a62fb5f18928ca878f1bf4750d1f0c815b", size = 6489616, upload-time = "2025-07-24T18:53:36.217Z" },
644
+ { url = "https://files.pythonhosted.org/packages/ae/95/ab64703b436d99dc5217228babc76047d60e9ad14df129e307b5fec81fd0/grpcio-1.74.0-cp312-cp312-win32.whl", hash = "sha256:885912559974df35d92219e2dc98f51a16a48395f37b92865ad45186f294096c", size = 3807083, upload-time = "2025-07-24T18:53:37.911Z" },
645
+ { url = "https://files.pythonhosted.org/packages/84/59/900aa2445891fc47a33f7d2f76e00ca5d6ae6584b20d19af9c06fa09bf9a/grpcio-1.74.0-cp312-cp312-win_amd64.whl", hash = "sha256:42f8fee287427b94be63d916c90399ed310ed10aadbf9e2e5538b3e497d269bc", size = 4490123, upload-time = "2025-07-24T18:53:39.528Z" },
646
+ { url = "https://files.pythonhosted.org/packages/d4/d8/1004a5f468715221450e66b051c839c2ce9a985aa3ee427422061fcbb6aa/grpcio-1.74.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:2bc2d7d8d184e2362b53905cb1708c84cb16354771c04b490485fa07ce3a1d89", size = 5449488, upload-time = "2025-07-24T18:53:41.174Z" },
647
+ { url = "https://files.pythonhosted.org/packages/94/0e/33731a03f63740d7743dced423846c831d8e6da808fcd02821a4416df7fa/grpcio-1.74.0-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:c14e803037e572c177ba54a3e090d6eb12efd795d49327c5ee2b3bddb836bf01", size = 10974059, upload-time = "2025-07-24T18:53:43.066Z" },
648
+ { url = "https://files.pythonhosted.org/packages/0d/c6/3d2c14d87771a421205bdca991467cfe473ee4c6a1231c1ede5248c62ab8/grpcio-1.74.0-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:f6ec94f0e50eb8fa1744a731088b966427575e40c2944a980049798b127a687e", size = 5945647, upload-time = "2025-07-24T18:53:45.269Z" },
649
+ { url = "https://files.pythonhosted.org/packages/c5/83/5a354c8aaff58594eef7fffebae41a0f8995a6258bbc6809b800c33d4c13/grpcio-1.74.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:566b9395b90cc3d0d0c6404bc8572c7c18786ede549cdb540ae27b58afe0fb91", size = 6626101, upload-time = "2025-07-24T18:53:47.015Z" },
650
+ { url = "https://files.pythonhosted.org/packages/3f/ca/4fdc7bf59bf6994aa45cbd4ef1055cd65e2884de6113dbd49f75498ddb08/grpcio-1.74.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1ea6176d7dfd5b941ea01c2ec34de9531ba494d541fe2057c904e601879f249", size = 6182562, upload-time = "2025-07-24T18:53:48.967Z" },
651
+ { url = "https://files.pythonhosted.org/packages/fd/48/2869e5b2c1922583686f7ae674937986807c2f676d08be70d0a541316270/grpcio-1.74.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:64229c1e9cea079420527fa8ac45d80fc1e8d3f94deaa35643c381fa8d98f362", size = 6303425, upload-time = "2025-07-24T18:53:50.847Z" },
652
+ { url = "https://files.pythonhosted.org/packages/a6/0e/bac93147b9a164f759497bc6913e74af1cb632c733c7af62c0336782bd38/grpcio-1.74.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:0f87bddd6e27fc776aacf7ebfec367b6d49cad0455123951e4488ea99d9b9b8f", size = 6996533, upload-time = "2025-07-24T18:53:52.747Z" },
653
+ { url = "https://files.pythonhosted.org/packages/84/35/9f6b2503c1fd86d068b46818bbd7329db26a87cdd8c01e0d1a9abea1104c/grpcio-1.74.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:3b03d8f2a07f0fea8c8f74deb59f8352b770e3900d143b3d1475effcb08eec20", size = 6491489, upload-time = "2025-07-24T18:53:55.06Z" },
654
+ { url = "https://files.pythonhosted.org/packages/75/33/a04e99be2a82c4cbc4039eb3a76f6c3632932b9d5d295221389d10ac9ca7/grpcio-1.74.0-cp313-cp313-win32.whl", hash = "sha256:b6a73b2ba83e663b2480a90b82fdae6a7aa6427f62bf43b29912c0cfd1aa2bfa", size = 3805811, upload-time = "2025-07-24T18:53:56.798Z" },
655
+ { url = "https://files.pythonhosted.org/packages/34/80/de3eb55eb581815342d097214bed4c59e806b05f1b3110df03b2280d6dfd/grpcio-1.74.0-cp313-cp313-win_amd64.whl", hash = "sha256:fd3c71aeee838299c5887230b8a1822795325ddfea635edd82954c1eaa831e24", size = 4489214, upload-time = "2025-07-24T18:53:59.771Z" },
656
+ ]
657
+
658
+ [[package]]
659
+ name = "grpcio-status"
660
+ version = "1.71.2"
661
+ source = { registry = "https://pypi.org/simple" }
662
+ dependencies = [
663
+ { name = "googleapis-common-protos" },
664
+ { name = "grpcio" },
665
+ { name = "protobuf" },
666
+ ]
667
+ sdist = { url = "https://files.pythonhosted.org/packages/fd/d1/b6e9877fedae3add1afdeae1f89d1927d296da9cf977eca0eb08fb8a460e/grpcio_status-1.71.2.tar.gz", hash = "sha256:c7a97e176df71cdc2c179cd1847d7fc86cca5832ad12e9798d7fed6b7a1aab50", size = 13677, upload-time = "2025-06-28T04:24:05.426Z" }
668
+ wheels = [
669
+ { url = "https://files.pythonhosted.org/packages/67/58/317b0134129b556a93a3b0afe00ee675b5657f0155509e22fcb853bafe2d/grpcio_status-1.71.2-py3-none-any.whl", hash = "sha256:803c98cb6a8b7dc6dbb785b1111aed739f241ab5e9da0bba96888aa74704cfd3", size = 14424, upload-time = "2025-06-28T04:23:42.136Z" },
670
+ ]
671
+
672
  [[package]]
673
  name = "h11"
674
  version = "0.16.0"
 
706
  { url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784, upload-time = "2025-04-24T22:06:20.566Z" },
707
  ]
708
 
709
+ [[package]]
710
+ name = "httplib2"
711
+ version = "0.22.0"
712
+ source = { registry = "https://pypi.org/simple" }
713
+ dependencies = [
714
+ { name = "pyparsing" },
715
+ ]
716
+ sdist = { url = "https://files.pythonhosted.org/packages/3d/ad/2371116b22d616c194aa25ec410c9c6c37f23599dcd590502b74db197584/httplib2-0.22.0.tar.gz", hash = "sha256:d7a10bc5ef5ab08322488bde8c726eeee5c8618723fdb399597ec58f3d82df81", size = 351116, upload-time = "2023-03-21T22:29:37.214Z" }
717
+ wheels = [
718
+ { url = "https://files.pythonhosted.org/packages/a8/6c/d2fbdaaa5959339d53ba38e94c123e4e84b8fbc4b84beb0e70d7c1608486/httplib2-0.22.0-py3-none-any.whl", hash = "sha256:14ae0a53c1ba8f3d37e9e27cf37eabb0fb9980f435ba405d546948b009dd64dc", size = 96854, upload-time = "2023-03-21T22:29:35.683Z" },
719
+ ]
720
+
721
  [[package]]
722
  name = "httpx"
723
  version = "0.28.1"
 
1716
  { url = "https://files.pythonhosted.org/packages/ce/4f/5249960887b1fbe561d9ff265496d170b55a735b76724f10ef19f9e40716/prompt_toolkit-3.0.51-py3-none-any.whl", hash = "sha256:52742911fde84e2d423e2f9a4cf1de7d7ac4e51958f648d9540e0fb8db077b07", size = 387810, upload-time = "2025-04-15T09:18:44.753Z" },
1717
  ]
1718
 
1719
+ [[package]]
1720
+ name = "proto-plus"
1721
+ version = "1.26.1"
1722
+ source = { registry = "https://pypi.org/simple" }
1723
+ dependencies = [
1724
+ { name = "protobuf" },
1725
+ ]
1726
+ sdist = { url = "https://files.pythonhosted.org/packages/f4/ac/87285f15f7cce6d4a008f33f1757fb5a13611ea8914eb58c3d0d26243468/proto_plus-1.26.1.tar.gz", hash = "sha256:21a515a4c4c0088a773899e23c7bbade3d18f9c66c73edd4c7ee3816bc96a012", size = 56142, upload-time = "2025-03-10T15:54:38.843Z" }
1727
+ wheels = [
1728
+ { url = "https://files.pythonhosted.org/packages/4e/6d/280c4c2ce28b1593a19ad5239c8b826871fc6ec275c21afc8e1820108039/proto_plus-1.26.1-py3-none-any.whl", hash = "sha256:13285478c2dcf2abb829db158e1047e2f1e8d63a077d94263c2b88b043c75a66", size = 50163, upload-time = "2025-03-10T15:54:37.335Z" },
1729
+ ]
1730
+
1731
+ [[package]]
1732
+ name = "protobuf"
1733
+ version = "5.29.5"
1734
+ source = { registry = "https://pypi.org/simple" }
1735
+ sdist = { url = "https://files.pythonhosted.org/packages/43/29/d09e70352e4e88c9c7a198d5645d7277811448d76c23b00345670f7c8a38/protobuf-5.29.5.tar.gz", hash = "sha256:bc1463bafd4b0929216c35f437a8e28731a2b7fe3d98bb77a600efced5a15c84", size = 425226, upload-time = "2025-05-28T23:51:59.82Z" }
1736
+ wheels = [
1737
+ { url = "https://files.pythonhosted.org/packages/5f/11/6e40e9fc5bba02988a214c07cf324595789ca7820160bfd1f8be96e48539/protobuf-5.29.5-cp310-abi3-win32.whl", hash = "sha256:3f1c6468a2cfd102ff4703976138844f78ebd1fb45f49011afc5139e9e283079", size = 422963, upload-time = "2025-05-28T23:51:41.204Z" },
1738
+ { url = "https://files.pythonhosted.org/packages/81/7f/73cefb093e1a2a7c3ffd839e6f9fcafb7a427d300c7f8aef9c64405d8ac6/protobuf-5.29.5-cp310-abi3-win_amd64.whl", hash = "sha256:3f76e3a3675b4a4d867b52e4a5f5b78a2ef9565549d4037e06cf7b0942b1d3fc", size = 434818, upload-time = "2025-05-28T23:51:44.297Z" },
1739
+ { url = "https://files.pythonhosted.org/packages/dd/73/10e1661c21f139f2c6ad9b23040ff36fee624310dc28fba20d33fdae124c/protobuf-5.29.5-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e38c5add5a311f2a6eb0340716ef9b039c1dfa428b28f25a7838ac329204a671", size = 418091, upload-time = "2025-05-28T23:51:45.907Z" },
1740
+ { url = "https://files.pythonhosted.org/packages/6c/04/98f6f8cf5b07ab1294c13f34b4e69b3722bb609c5b701d6c169828f9f8aa/protobuf-5.29.5-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:fa18533a299d7ab6c55a238bf8629311439995f2e7eca5caaff08663606e9015", size = 319824, upload-time = "2025-05-28T23:51:47.545Z" },
1741
+ { url = "https://files.pythonhosted.org/packages/85/e4/07c80521879c2d15f321465ac24c70efe2381378c00bf5e56a0f4fbac8cd/protobuf-5.29.5-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:63848923da3325e1bf7e9003d680ce6e14b07e55d0473253a690c3a8b8fd6e61", size = 319942, upload-time = "2025-05-28T23:51:49.11Z" },
1742
+ { url = "https://files.pythonhosted.org/packages/7e/cc/7e77861000a0691aeea8f4566e5d3aa716f2b1dece4a24439437e41d3d25/protobuf-5.29.5-py3-none-any.whl", hash = "sha256:6cf42630262c59b2d8de33954443d94b746c952b01434fc58a417fdbd2e84bd5", size = 172823, upload-time = "2025-05-28T23:51:58.157Z" },
1743
+ ]
1744
+
1745
  [[package]]
1746
  name = "psutil"
1747
  version = "7.0.0"
 
1775
  { url = "https://files.pythonhosted.org/packages/8e/37/efad0257dc6e593a18957422533ff0f87ede7c9c6ea010a2177d738fb82f/pure_eval-0.2.3-py3-none-any.whl", hash = "sha256:1db8e35b67b3d218d818ae653e27f06c3aa420901fa7b081ca98cbedc874e0d0", size = 11842, upload-time = "2024-07-21T12:58:20.04Z" },
1776
  ]
1777
 
1778
+ [[package]]
1779
+ name = "pyasn1"
1780
+ version = "0.6.1"
1781
+ source = { registry = "https://pypi.org/simple" }
1782
+ sdist = { url = "https://files.pythonhosted.org/packages/ba/e9/01f1a64245b89f039897cb0130016d79f77d52669aae6ee7b159a6c4c018/pyasn1-0.6.1.tar.gz", hash = "sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034", size = 145322, upload-time = "2024-09-10T22:41:42.55Z" }
1783
+ wheels = [
1784
+ { url = "https://files.pythonhosted.org/packages/c8/f1/d6a797abb14f6283c0ddff96bbdd46937f64122b8c925cab503dd37f8214/pyasn1-0.6.1-py3-none-any.whl", hash = "sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629", size = 83135, upload-time = "2024-09-11T16:00:36.122Z" },
1785
+ ]
1786
+
1787
+ [[package]]
1788
+ name = "pyasn1-modules"
1789
+ version = "0.4.2"
1790
+ source = { registry = "https://pypi.org/simple" }
1791
+ dependencies = [
1792
+ { name = "pyasn1" },
1793
+ ]
1794
+ sdist = { url = "https://files.pythonhosted.org/packages/e9/e6/78ebbb10a8c8e4b61a59249394a4a594c1a7af95593dc933a349c8d00964/pyasn1_modules-0.4.2.tar.gz", hash = "sha256:677091de870a80aae844b1ca6134f54652fa2c8c5a52aa396440ac3106e941e6", size = 307892, upload-time = "2025-03-28T02:41:22.17Z" }
1795
+ wheels = [
1796
+ { url = "https://files.pythonhosted.org/packages/47/8d/d529b5d697919ba8c11ad626e835d4039be708a35b0d22de83a269a6682c/pyasn1_modules-0.4.2-py3-none-any.whl", hash = "sha256:29253a9207ce32b64c3ac6600edc75368f98473906e8fd1043bd6b5b1de2c14a", size = 181259, upload-time = "2025-03-28T02:41:19.028Z" },
1797
+ ]
1798
+
1799
  [[package]]
1800
  name = "pycparser"
1801
  version = "2.22"
 
1871
  { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" },
1872
  ]
1873
 
1874
+ [[package]]
1875
+ name = "pyparsing"
1876
+ version = "3.2.3"
1877
+ source = { registry = "https://pypi.org/simple" }
1878
+ sdist = { url = "https://files.pythonhosted.org/packages/bb/22/f1129e69d94ffff626bdb5c835506b3a5b4f3d070f17ea295e12c2c6f60f/pyparsing-3.2.3.tar.gz", hash = "sha256:b9c13f1ab8b3b542f72e28f634bad4de758ab3ce4546e4301970ad6fa77c38be", size = 1088608, upload-time = "2025-03-25T05:01:28.114Z" }
1879
+ wheels = [
1880
+ { url = "https://files.pythonhosted.org/packages/05/e7/df2285f3d08fee213f2d041540fa4fc9ca6c2d44cf36d3a035bf2a8d2bcc/pyparsing-3.2.3-py3-none-any.whl", hash = "sha256:a749938e02d6fd0b59b356ca504a24982314bb090c383e3cf201c95ef7e2bfcf", size = 111120, upload-time = "2025-03-25T05:01:24.908Z" },
1881
+ ]
1882
+
1883
  [[package]]
1884
  name = "pytest"
1885
  version = "8.4.1"
 
1908
  { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892, upload-time = "2024-03-01T18:36:18.57Z" },
1909
  ]
1910
 
1911
+ [[package]]
1912
+ name = "python-dotenv"
1913
+ version = "1.1.1"
1914
+ source = { registry = "https://pypi.org/simple" }
1915
+ sdist = { url = "https://files.pythonhosted.org/packages/f6/b0/4bc07ccd3572a2f9df7e6782f52b0c6c90dcbb803ac4a167702d7d0dfe1e/python_dotenv-1.1.1.tar.gz", hash = "sha256:a8a6399716257f45be6a007360200409fce5cda2661e3dec71d23dc15f6189ab", size = 41978, upload-time = "2025-06-24T04:21:07.341Z" }
1916
+ wheels = [
1917
+ { url = "https://files.pythonhosted.org/packages/5f/ed/539768cf28c661b5b068d66d96a2f155c4971a5d55684a514c1a0e0dec2f/python_dotenv-1.1.1-py3-none-any.whl", hash = "sha256:31f23644fe2602f88ff55e1f5c79ba497e01224ee7737937930c448e4d0e24dc", size = 20556, upload-time = "2025-06-24T04:21:06.073Z" },
1918
+ ]
1919
+
1920
  [[package]]
1921
  name = "python-json-logger"
1922
  version = "3.3.0"
 
2222
  { url = "https://files.pythonhosted.org/packages/75/04/5302cea1aa26d886d34cadbf2dc77d90d7737e576c0065f357b96dc7a1a6/rpds_py-0.26.0-cp314-cp314t-win_amd64.whl", hash = "sha256:f14440b9573a6f76b4ee4770c13f0b5921f71dde3b6fcb8dabbefd13b7fe05d7", size = 232821, upload-time = "2025-07-01T15:55:55.167Z" },
2223
  ]
2224
 
2225
+ [[package]]
2226
+ name = "rsa"
2227
+ version = "4.9.1"
2228
+ source = { registry = "https://pypi.org/simple" }
2229
+ dependencies = [
2230
+ { name = "pyasn1" },
2231
+ ]
2232
+ sdist = { url = "https://files.pythonhosted.org/packages/da/8a/22b7beea3ee0d44b1916c0c1cb0ee3af23b700b6da9f04991899d0c555d4/rsa-4.9.1.tar.gz", hash = "sha256:e7bdbfdb5497da4c07dfd35530e1a902659db6ff241e39d9953cad06ebd0ae75", size = 29034, upload-time = "2025-04-16T09:51:18.218Z" }
2233
+ wheels = [
2234
+ { url = "https://files.pythonhosted.org/packages/64/8d/0133e4eb4beed9e425d9a98ed6e081a55d195481b7632472be1af08d2f6b/rsa-4.9.1-py3-none-any.whl", hash = "sha256:68635866661c6836b8d39430f97a996acbd61bfa49406748ea243539fe239762", size = 34696, upload-time = "2025-04-16T09:51:17.142Z" },
2235
+ ]
2236
+
2237
  [[package]]
2238
  name = "ruff"
2239
  version = "0.12.7"
 
2685
  { url = "https://files.pythonhosted.org/packages/e7/00/3fca040d7cf8a32776d3d81a00c8ee7457e00f80c649f1e4a863c8321ae9/uri_template-1.3.0-py3-none-any.whl", hash = "sha256:a44a133ea12d44a0c0f06d7d42a52d71282e77e2f937d8abd5655b8d56fc1363", size = 11140, upload-time = "2023-06-21T01:49:03.467Z" },
2686
  ]
2687
 
2688
+ [[package]]
2689
+ name = "uritemplate"
2690
+ version = "4.2.0"
2691
+ source = { registry = "https://pypi.org/simple" }
2692
+ sdist = { url = "https://files.pythonhosted.org/packages/98/60/f174043244c5306c9988380d2cb10009f91563fc4b31293d27e17201af56/uritemplate-4.2.0.tar.gz", hash = "sha256:480c2ed180878955863323eea31b0ede668795de182617fef9c6ca09e6ec9d0e", size = 33267, upload-time = "2025-06-02T15:12:06.318Z" }
2693
+ wheels = [
2694
+ { url = "https://files.pythonhosted.org/packages/a9/99/3ae339466c9183ea5b8ae87b34c0b897eda475d2aec2307cae60e5cd4f29/uritemplate-4.2.0-py3-none-any.whl", hash = "sha256:962201ba1c4edcab02e60f9a0d3821e82dfc5d2d6662a21abd533879bdb8a686", size = 11488, upload-time = "2025-06-02T15:12:03.405Z" },
2695
+ ]
2696
+
2697
  [[package]]
2698
  name = "urllib3"
2699
  version = "2.5.0"