mishrabp committed on
Commit
1981cf8
·
verified ·
1 Parent(s): 4a6e95b

Upload folder using huggingface_hub

Browse files
Files changed (40) hide show
  1. common/aagents/core/__init__.py +4 -0
  2. common/aagents/core/model.py +36 -0
  3. common/aagents/google_agent.py +3 -20
  4. common/aagents/healthcare_agent.py +4 -29
  5. common/aagents/news_agent.py +3 -20
  6. common/aagents/search_agent.py +3 -15
  7. common/aagents/weather_agent.py +3 -25
  8. common/aagents/web_agent.py +4 -30
  9. common/aagents/web_research_agent.py +2 -26
  10. common/aagents/yf_agent.py +4 -20
  11. common/mcp/tools/google_tools.py +4 -2
  12. common/mcp/tools/news_tools.py +4 -2
  13. common/mcp/tools/rag_tool.py +2 -15
  14. common/mcp/tools/search_tools.py +22 -5
  15. common/mcp/tools/time_tools.py +2 -0
  16. common/mcp/tools/weather_tools.py +4 -2
  17. common/mcp/tools/yf_tools.py +4 -2
  18. common/rag/rag.py +1 -1
  19. common/utility/autogen_model_factory.py +88 -0
  20. common/utility/bkp/embedding_factory.py +49 -0
  21. common/utility/bkp/llm_factory.py +130 -0
  22. common/utility/bkp/llm_factory2.py +75 -0
  23. common/utility/langchain_model_factory.py +70 -0
  24. common/utility/logger.py +1 -1
  25. common/utility/model_factory_notused.py +302 -0
  26. common/utility/openai_model_factory.py +179 -0
  27. pyproject.toml +3 -2
  28. run.py +7 -1
  29. src/deep-research/app.py +11 -27
  30. src/deep-research/appagents/email_agent.py +4 -5
  31. src/deep-research/appagents/guardrail_agent.py +3 -4
  32. src/deep-research/appagents/orchestrator.py +20 -10
  33. src/deep-research/appagents/planner_agent.py +5 -17
  34. src/deep-research/appagents/search_agent.py +7 -33
  35. src/deep-research/appagents/writer_agent.py +8 -19
  36. src/deep-research/core/__init__.py +4 -0
  37. src/deep-research/core/model.py +36 -0
  38. src/deep-research/tools/google_tools.py +1 -5
  39. src/deep-research/tools/time_tools.py +3 -1
  40. uv.lock +871 -108
common/aagents/core/__init__.py ADDED
@@ -0,0 +1,4 @@
 
 
 
 
 
1
+
2
+ from .model import get_model_client
3
+
4
+ __all__ = ["get_model_client"]
common/aagents/core/model.py ADDED
@@ -0,0 +1,36 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
from common.utility.openai_model_factory import OpenAIModelFactory

# Default model per provider. NOTE(review): the "groq" and "ollama" entries
# mirror the original hard-coded value "gpt-4o-mini" — confirm these are the
# intended model names for those providers (they look copy-pasted from azure).
_PROVIDER_DEFAULT_MODELS = {
    "google": "gemini-2.5-flash",
    "openai": "gpt-4.1-mini",
    "azure": "gpt-4o-mini",
    "groq": "gpt-4o-mini",
    "ollama": "gpt-4o-mini",
}


def get_model_client(provider: str = "openai"):
    """Return a chat-model client for the given provider.

    Looks up the provider's default model name and delegates to
    ``OpenAIModelFactory.get_model`` with ``temperature=0``.

    Args:
        provider: One of ``"openai"`` (default), ``"google"``, ``"azure"``,
            ``"groq"``, or ``"ollama"``. Matching is case-insensitive.

    Returns:
        The model client produced by ``OpenAIModelFactory.get_model``.

    Raises:
        ValueError: If *provider* is not one of the supported names.
    """
    key = provider.lower()
    try:
        model_name = _PROVIDER_DEFAULT_MODELS[key]
    except KeyError:
        raise ValueError(f"Unsupported provider: {provider}") from None
    return OpenAIModelFactory.get_model(
        provider=key,
        model_name=model_name,
        temperature=0,
    )
common/aagents/google_agent.py CHANGED
@@ -1,30 +1,13 @@
1
  """Google search agent module for web search and information retrieval."""
2
- import os
3
- from agents import Agent, OpenAIChatCompletionsModel
4
- from dotenv import load_dotenv
5
  from common.mcp.tools.google_tools import google_search, google_search_recent
6
  from common.mcp.tools.search_tools import duckduckgo_search, fetch_page_content
7
  from common.mcp.tools.time_tools import current_datetime
8
- from openai import AsyncOpenAI
9
-
10
- # ---------------------------------------------------------
11
- # Load environment variables
12
- # ---------------------------------------------------------
13
- load_dotenv()
14
-
15
- GEMINI_BASE_URL = "https://generativelanguage.googleapis.com/v1beta/openai/"
16
- google_api_key = os.getenv('GOOGLE_API_KEY')
17
- gemini_client = AsyncOpenAI(base_url=GEMINI_BASE_URL, api_key=google_api_key)
18
- gemini_model = OpenAIChatCompletionsModel(model="gemini-2.0-flash-exp", openai_client=gemini_client)
19
-
20
- GROQ_BASE_URL = "https://api.groq.com/openai/v1"
21
- groq_api_key = os.getenv('GROQ_API_KEY')
22
- groq_client = AsyncOpenAI(base_url=GROQ_BASE_URL, api_key=groq_api_key)
23
- groq_model = OpenAIChatCompletionsModel(model="groq/compound", openai_client=groq_client)
24
 
25
  google_agent = Agent(
26
  name="GoogleSearchAgent",
27
- model=gemini_model,
28
  tools=[current_datetime, google_search, google_search_recent, duckduckgo_search, fetch_page_content],
29
  instructions="""
30
  You are a GoogleSearchAgent specialized in finding and retrieving information from the web.
 
1
  """Google search agent module for web search and information retrieval."""
2
+ from agents import Agent
 
 
3
  from common.mcp.tools.google_tools import google_search, google_search_recent
4
  from common.mcp.tools.search_tools import duckduckgo_search, fetch_page_content
5
  from common.mcp.tools.time_tools import current_datetime
6
+ from .core.model import get_model_client
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
7
 
8
  google_agent = Agent(
9
  name="GoogleSearchAgent",
10
+ model=get_model_client(),
11
  tools=[current_datetime, google_search, google_search_recent, duckduckgo_search, fetch_page_content],
12
  instructions="""
13
  You are a GoogleSearchAgent specialized in finding and retrieving information from the web.
common/aagents/healthcare_agent.py CHANGED
@@ -1,42 +1,17 @@
1
  """Healthcare RAG Agent - Combines RAG retrieval with web search for comprehensive medical information."""
2
- import os
3
- from agents import Agent, OpenAIChatCompletionsModel
4
- from dotenv import load_dotenv
5
- from openai import AsyncOpenAI
6
- from langsmith import wrappers
7
-
8
-
9
- # Import tools
10
  from common.mcp.tools.rag_tool import rag_search, UserContext
11
  from common.mcp.tools.search_tools import duckduckgo_search, fetch_page_content
12
  from common.mcp.tools.time_tools import current_datetime
13
-
14
-
15
- # ---------------------------------------------------------
16
- # Load environment variables
17
- # ---------------------------------------------------------
18
- load_dotenv()
19
-
20
- # ---------------------------------------------------------
21
- # Model Configuration
22
- # ---------------------------------------------------------
23
- GEMINI_BASE_URL = "https://generativelanguage.googleapis.com/v1beta/openai/"
24
- google_api_key = os.getenv('GOOGLE_API_KEY')
25
- gemini_client = AsyncOpenAI(base_url=GEMINI_BASE_URL, api_key=google_api_key)
26
- gemini_client = wrappers.wrap_openai(gemini_client)
27
- gemini_model = OpenAIChatCompletionsModel(model="gemini-2.0-flash-exp", openai_client=gemini_client)
28
-
29
- GROQ_BASE_URL = "https://api.groq.com/openai/v1"
30
- groq_api_key = os.getenv('GROQ_API_KEY')
31
- groq_client = AsyncOpenAI(base_url=GROQ_BASE_URL, api_key=groq_api_key)
32
- groq_model = OpenAIChatCompletionsModel(model="groq/compound", openai_client=groq_client)
33
 
34
  # ---------------------------------------------------------
35
  # Healthcare RAG Agent
36
  # ---------------------------------------------------------
37
  healthcare_agent = Agent[UserContext](
38
  name="HealthcareRAGAgent",
39
- model=gemini_model,
40
  tools=[rag_search, duckduckgo_search, fetch_page_content],
41
  instructions="""
42
  You are a healthcare information retrieval agent. You retrieve information from tools and synthesize it into well-formatted markdown responses.
 
1
  """Healthcare RAG Agent - Combines RAG retrieval with web search for comprehensive medical information."""
2
+ from agents import Agent
 
 
 
 
 
 
 
3
  from common.mcp.tools.rag_tool import rag_search, UserContext
4
  from common.mcp.tools.search_tools import duckduckgo_search, fetch_page_content
5
  from common.mcp.tools.time_tools import current_datetime
6
+ from .core.model import get_model_client
7
+
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
8
 
9
  # ---------------------------------------------------------
10
  # Healthcare RAG Agent
11
  # ---------------------------------------------------------
12
  healthcare_agent = Agent[UserContext](
13
  name="HealthcareRAGAgent",
14
+ model=get_model_client(),
15
  tools=[rag_search, duckduckgo_search, fetch_page_content],
16
  instructions="""
17
  You are a healthcare information retrieval agent. You retrieve information from tools and synthesize it into well-formatted markdown responses.
common/aagents/news_agent.py CHANGED
@@ -1,29 +1,12 @@
1
  """News agent module for fetching and analyzing news articles."""
2
- import os
3
- from agents import Agent, OpenAIChatCompletionsModel
4
- from dotenv import load_dotenv
5
  from common.mcp.tools.news_tools import get_top_headlines, search_news, get_news_by_category
6
  from common.mcp.tools.time_tools import current_datetime
7
- from openai import AsyncOpenAI
8
-
9
- # ---------------------------------------------------------
10
- # Load environment variables
11
- # ---------------------------------------------------------
12
- load_dotenv()
13
-
14
- GEMINI_BASE_URL = "https://generativelanguage.googleapis.com/v1beta/openai/"
15
- google_api_key = os.getenv('GOOGLE_API_KEY')
16
- gemini_client = AsyncOpenAI(base_url=GEMINI_BASE_URL, api_key=google_api_key)
17
- gemini_model = OpenAIChatCompletionsModel(model="gemini-2.0-flash-exp", openai_client=gemini_client)
18
-
19
- GROQ_BASE_URL = "https://api.groq.com/openai/v1"
20
- groq_api_key = os.getenv('GROQ_API_KEY')
21
- groq_client = AsyncOpenAI(base_url=GROQ_BASE_URL, api_key=groq_api_key)
22
- groq_model = OpenAIChatCompletionsModel(model="groq/compound", openai_client=groq_client)
23
 
24
  news_agent = Agent(
25
  name="NewsAgent",
26
- model=gemini_model,
27
  tools=[current_datetime, get_top_headlines, search_news, get_news_by_category],
28
  instructions="""
29
  You are a NewsAgent specialized in fetching and analyzing recent news articles and headlines.
 
1
  """News agent module for fetching and analyzing news articles."""
2
+ from agents import Agent
 
 
3
  from common.mcp.tools.news_tools import get_top_headlines, search_news, get_news_by_category
4
  from common.mcp.tools.time_tools import current_datetime
5
+ from .core.model import get_model_client
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
6
 
7
  news_agent = Agent(
8
  name="NewsAgent",
9
+ model=get_model_client(),
10
  tools=[current_datetime, get_top_headlines, search_news, get_news_by_category],
11
  instructions="""
12
  You are a NewsAgent specialized in fetching and analyzing recent news articles and headlines.
common/aagents/search_agent.py CHANGED
@@ -1,24 +1,12 @@
1
  """Search agent module for comprehensive web searches."""
2
- import os
3
- from agents import Agent, OpenAIChatCompletionsModel
4
- from openai import AsyncOpenAI
5
- from dotenv import load_dotenv
6
  from common.mcp.tools.search_tools import duckduckgo_search, fetch_page_content
7
  from common.mcp.tools.time_tools import current_datetime
8
-
9
- # ---------------------------------------------------------
10
- # Load environment variables
11
- # ---------------------------------------------------------
12
- load_dotenv()
13
-
14
- GEMINI_BASE_URL = "https://generativelanguage.googleapis.com/v1beta/openai/"
15
- google_api_key = os.getenv('GOOGLE_API_KEY')
16
- gemini_client = AsyncOpenAI(base_url=GEMINI_BASE_URL, api_key=google_api_key)
17
- gemini_model = OpenAIChatCompletionsModel(model="gemini-2.0-flash-exp", openai_client=gemini_client)
18
 
19
  search_agent = Agent(
20
  name="Web Search Agent",
21
- model=gemini_model,
22
  tools=[current_datetime, duckduckgo_search, fetch_page_content],
23
  instructions="""
24
  You are a highly efficient and specialized **Web Search Agent** 🌐. Your sole function is to retrieve and analyze information from the internet using the **duckduckgo_search** and **fetch_page_content** functions. You must act as a digital librarian and researcher, providing synthesized, cited, and up-to-date answers.
 
1
  """Search agent module for comprehensive web searches."""
2
+ from agents import Agent
 
 
 
3
  from common.mcp.tools.search_tools import duckduckgo_search, fetch_page_content
4
  from common.mcp.tools.time_tools import current_datetime
5
+ from .core.model import get_model_client
 
 
 
 
 
 
 
 
 
6
 
7
  search_agent = Agent(
8
  name="Web Search Agent",
9
+ model=get_model_client(),
10
  tools=[current_datetime, duckduckgo_search, fetch_page_content],
11
  instructions="""
12
  You are a highly efficient and specialized **Web Search Agent** 🌐. Your sole function is to retrieve and analyze information from the internet using the **duckduckgo_search** and **fetch_page_content** functions. You must act as a digital librarian and researcher, providing synthesized, cited, and up-to-date answers.
common/aagents/weather_agent.py CHANGED
@@ -1,37 +1,15 @@
1
  """Web search agent module for internet queries."""
2
  import os
3
  from agents import Agent
4
- from dotenv import load_dotenv
5
  from pydantic import BaseModel, Field
6
  from common.mcp.tools.weather_tools import get_weather_forecast, search_weather_fallback_ddgs, search_weather_fallback_bs
7
  from common.mcp.tools.time_tools import current_datetime
8
- from agents import Agent, OpenAIChatCompletionsModel
9
- from openai import AsyncOpenAI
10
-
11
- # ---------------------------------------------------------
12
- # Load environment variables
13
- # ---------------------------------------------------------
14
- load_dotenv()
15
-
16
- ################################
17
- # Learning: gemini models struggles to construct the output_type when it's a Pydantic model.
18
- # So we use list[dict] as output_type instead of list[searchResult].
19
- # Then in the calling code, we can convert dicts back to searchResult models if needed.
20
- ################################
21
-
22
- GEMINI_BASE_URL = "https://generativelanguage.googleapis.com/v1beta/openai/"
23
- google_api_key = os.getenv('GOOGLE_API_KEY')
24
- gemini_client = AsyncOpenAI(base_url=GEMINI_BASE_URL, api_key=google_api_key)
25
- gemini_model = OpenAIChatCompletionsModel(model="gemini-flash-latest", openai_client=gemini_client)
26
-
27
- GROQ_BASE_URL = "https://api.groq.com/openai/v1"
28
- groq_api_key = os.getenv('GROQ_API_KEY')
29
- groq_client = AsyncOpenAI(base_url=GROQ_BASE_URL, api_key=groq_api_key)
30
- groq_model = OpenAIChatCompletionsModel(model="groq/compound", openai_client=groq_client)
31
 
32
  weather_agent = Agent(
33
  name="WeatherAgent",
34
- model=gemini_model, #"gpt-4o-mini",
35
  tools=[current_datetime, get_weather_forecast, search_weather_fallback_ddgs, search_weather_fallback_bs],
36
  instructions="""
37
  You are a Weather Forecast agent who forecasts weather information ONLY.
 
1
  """Web search agent module for internet queries."""
2
  import os
3
  from agents import Agent
 
4
  from pydantic import BaseModel, Field
5
  from common.mcp.tools.weather_tools import get_weather_forecast, search_weather_fallback_ddgs, search_weather_fallback_bs
6
  from common.mcp.tools.time_tools import current_datetime
7
+ from .core.model import get_model_client
8
+
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
9
 
10
  weather_agent = Agent(
11
  name="WeatherAgent",
12
+ model=get_model_client(),
13
  tools=[current_datetime, get_weather_forecast, search_weather_fallback_ddgs, search_weather_fallback_bs],
14
  instructions="""
15
  You are a Weather Forecast agent who forecasts weather information ONLY.
common/aagents/web_agent.py CHANGED
@@ -1,37 +1,11 @@
1
  """Web search agent module for internet queries."""
2
- import os
3
- from agents import AgentOutputSchema, function_tool, Agent
4
- from dotenv import load_dotenv
5
- from pydantic import BaseModel, Field
6
  from common.mcp.tools.search_tools import duckduckgo_search, searchQuery, searchResult
7
- from agents import Agent, OpenAIChatCompletionsModel
8
- from openai import AsyncOpenAI
9
-
10
- # ---------------------------------------------------------
11
- # Load environment variables
12
- # ---------------------------------------------------------
13
- load_dotenv()
14
-
15
- ################################
16
- # Learning: gemini models struggles to construct the output_type when it's a Pydantic model.
17
- # So we use list[dict] as output_type instead of list[searchResult].
18
- # Then in the calling code, we can convert dicts back to searchResult models if needed.
19
- ################################
20
-
21
- GEMINI_BASE_URL = "https://generativelanguage.googleapis.com/v1beta/openai/"
22
- google_api_key = os.getenv('GOOGLE_API_KEY')
23
- gemini_client = AsyncOpenAI(base_url=GEMINI_BASE_URL, api_key=google_api_key)
24
- gemini_model = OpenAIChatCompletionsModel(model="gemini-2.0-flash-exp", openai_client=gemini_client)
25
-
26
- GROQ_BASE_URL = "https://api.groq.com/openai/v1"
27
- groq_api_key = os.getenv('GROQ_API_KEY')
28
- groq_client = AsyncOpenAI(base_url=GROQ_BASE_URL, api_key=groq_api_key)
29
- groq_model = OpenAIChatCompletionsModel(model="groq/compound", openai_client=groq_client)
30
-
31
  web_agent = Agent(
32
  name="WebAgent",
33
- model="gpt-4o-mini",
34
- # description="An agent that can perform web searches using DuckDuckGo.",
35
  tools=[duckduckgo_search],
36
  instructions="""
37
  You are a WebAgent that can perform web searches to find information on the internet.
 
1
  """Web search agent module for internet queries."""
2
+ from agents import Agent
 
 
 
3
  from common.mcp.tools.search_tools import duckduckgo_search, searchQuery, searchResult
4
+ from .core.model import get_model_client
5
+
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
6
  web_agent = Agent(
7
  name="WebAgent",
8
+ model=get_model_client(),
 
9
  tools=[duckduckgo_search],
10
  instructions="""
11
  You are a WebAgent that can perform web searches to find information on the internet.
common/aagents/web_research_agent.py CHANGED
@@ -1,35 +1,11 @@
1
  """Web search agent module for internet queries."""
2
- import os
3
  from agents import AgentOutputSchema, function_tool, Agent
4
- from dotenv import load_dotenv
5
  from pydantic import BaseModel, Field
6
  from common.mcp.tools.search_tools import duckduckgo_search, searchQuery, searchResult, fetch_page_content
7
- from agents import Agent, OpenAIChatCompletionsModel
8
- from openai import AsyncOpenAI
9
-
10
- # ---------------------------------------------------------
11
- # Load environment variables
12
- # ---------------------------------------------------------
13
- load_dotenv()
14
-
15
- ################################
16
- # Learning: gemini models struggles to construct the output_type when it's a Pydantic model.
17
- # So we use list[dict] as output_type instead of list[searchResult].
18
- # Then in the calling code, we can convert dicts back to searchResult models if needed.
19
- ################################
20
-
21
- GEMINI_BASE_URL = "https://generativelanguage.googleapis.com/v1beta/openai/"
22
- google_api_key = os.getenv('GOOGLE_API_KEY')
23
- gemini_client = AsyncOpenAI(base_url=GEMINI_BASE_URL, api_key=google_api_key)
24
- gemini_model = OpenAIChatCompletionsModel(model="gemini-2.0-flash-exp", openai_client=gemini_client)
25
-
26
- GROQ_BASE_URL = "https://api.groq.com/openai/v1"
27
- groq_api_key = os.getenv('GROQ_API_KEY')
28
- groq_client = AsyncOpenAI(base_url=GROQ_BASE_URL, api_key=groq_api_key)
29
- groq_model = OpenAIChatCompletionsModel(model="groq/compound", openai_client=groq_client)
30
 
31
  web_research_agent = Agent(
32
- model="gpt-4o-mini",
33
  tools=[duckduckgo_search, fetch_page_content],
34
  instructions="""
35
  You are WebResearchAgent — an advanced internet research assistant with two core abilities:
 
1
  """Web search agent module for internet queries."""
 
2
  from agents import AgentOutputSchema, function_tool, Agent
 
3
  from pydantic import BaseModel, Field
4
  from common.mcp.tools.search_tools import duckduckgo_search, searchQuery, searchResult, fetch_page_content
5
+ from .core.model import get_model_client
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
6
 
7
  web_research_agent = Agent(
8
+ model=get_model_client(),
9
  tools=[duckduckgo_search, fetch_page_content],
10
  instructions="""
11
  You are WebResearchAgent — an advanced internet research assistant with two core abilities:
common/aagents/yf_agent.py CHANGED
@@ -1,29 +1,13 @@
1
  """Yahoo Finance agent module for financial analysis and market research."""
2
- import os
3
- from agents import Agent, OpenAIChatCompletionsModel
4
- from dotenv import load_dotenv
5
  from common.mcp.tools.yf_tools import get_summary, get_market_sentiment, get_history, get_analyst_recommendations, get_earnings_calendar
6
  from common.mcp.tools.time_tools import current_datetime
7
- from openai import AsyncOpenAI
8
 
9
- # ---------------------------------------------------------
10
- # Load environment variables
11
- # ---------------------------------------------------------
12
- load_dotenv()
13
 
14
- GEMINI_BASE_URL = "https://generativelanguage.googleapis.com/v1beta/openai/"
15
- google_api_key = os.getenv('GOOGLE_API_KEY')
16
- gemini_client = AsyncOpenAI(base_url=GEMINI_BASE_URL, api_key=google_api_key)
17
- gemini_model = OpenAIChatCompletionsModel(model="gemini-2.0-flash-exp", openai_client=gemini_client)
18
-
19
- GROQ_BASE_URL = "https://api.groq.com/openai/v1"
20
- groq_api_key = os.getenv('GROQ_API_KEY')
21
- groq_client = AsyncOpenAI(base_url=GROQ_BASE_URL, api_key=groq_api_key)
22
- groq_model = OpenAIChatCompletionsModel(model="groq/compound", openai_client=groq_client)
23
-
24
- yf_agent = Agent(
25
  name="YahooFinanceAgent",
26
- model=gemini_model,
27
  tools=[current_datetime, get_summary, get_market_sentiment, get_history, get_analyst_recommendations, get_earnings_calendar],
28
  instructions="""
29
  You are a specialized **Financial Analysis Agent** 💰, expert in market research, financial data retrieval, and market analysis.
 
1
  """Yahoo Finance agent module for financial analysis and market research."""
2
+ from agents import Agent
 
 
3
  from common.mcp.tools.yf_tools import get_summary, get_market_sentiment, get_history, get_analyst_recommendations, get_earnings_calendar
4
  from common.mcp.tools.time_tools import current_datetime
5
+ from .core.model import get_model_client
6
 
 
 
 
 
7
 
8
+ yf_agent = Agent(
 
 
 
 
 
 
 
 
 
 
9
  name="YahooFinanceAgent",
10
+ model=get_model_client(),
11
  tools=[current_datetime, get_summary, get_market_sentiment, get_history, get_analyst_recommendations, get_earnings_calendar],
12
  instructions="""
13
  You are a specialized **Financial Analysis Agent** 💰, expert in market research, financial data retrieval, and market analysis.
common/mcp/tools/google_tools.py CHANGED
@@ -1,13 +1,15 @@
1
  import os
2
  import requests
3
- from dotenv import load_dotenv
4
  from agents import function_tool
5
  from typing import Optional
6
 
 
 
7
  # ---------------------------------------------------------
8
  # Load environment variables
9
  # ---------------------------------------------------------
10
- load_dotenv()
11
 
12
  # ============================================================
13
  # 🔹 GOOGLE SEARCH TOOLSET (Serper.dev API)
 
1
  import os
2
  import requests
3
+
4
  from agents import function_tool
5
  from typing import Optional
6
 
7
+
8
+
9
  # ---------------------------------------------------------
10
  # Load environment variables
11
  # ---------------------------------------------------------
12
+
13
 
14
  # ============================================================
15
  # 🔹 GOOGLE SEARCH TOOLSET (Serper.dev API)
common/mcp/tools/news_tools.py CHANGED
@@ -1,14 +1,16 @@
1
  import os
2
  import requests
3
- from dotenv import load_dotenv
4
  from agents import function_tool
5
  from typing import Optional
6
  import datetime
7
 
 
 
8
  # ---------------------------------------------------------
9
  # Load environment variables
10
  # ---------------------------------------------------------
11
- load_dotenv()
12
 
13
  # ============================================================
14
  # 🔹 NEWS TOOLSET (NewsAPI.org)
 
1
  import os
2
  import requests
3
+
4
  from agents import function_tool
5
  from typing import Optional
6
  import datetime
7
 
8
+
9
+
10
  # ---------------------------------------------------------
11
  # Load environment variables
12
  # ---------------------------------------------------------
13
+
14
 
15
  # ============================================================
16
  # 🔹 NEWS TOOLSET (NewsAPI.org)
common/mcp/tools/rag_tool.py CHANGED
@@ -2,10 +2,9 @@
2
  import os
3
  from pathlib import Path
4
  from agents import function_tool, RunContextWrapper
5
- from dotenv import load_dotenv
6
- from rag.rag import Retriever
7
- from dataclasses import dataclass
8
 
 
 
9
 
10
  @dataclass
11
  class UserContext:
@@ -15,18 +14,6 @@ class UserContext:
15
  similarity_threshold: float = 0.4 # FAISS L2 distance threshold for RAG relevance
16
 
17
 
18
- # ---------------------------------------------------------
19
- # Load environment variables
20
- # ---------------------------------------------------------
21
- load_dotenv()
22
-
23
- # ---------------------------------------------------------
24
- # Initialize RAG Retriever
25
- # ---------------------------------------------------------
26
- # Get the healthcare-rag-chatbot directory path
27
- # healthcare_dir = str(Path(__file__).parent.parent.parent)
28
- # retriever = None
29
-
30
  # ---------------------------------------------------------
31
  # RAG Search Tool
32
  # ---------------------------------------------------------
 
2
  import os
3
  from pathlib import Path
4
  from agents import function_tool, RunContextWrapper
 
 
 
5
 
6
+ from common.rag.rag import Retriever
7
+ from dataclasses import dataclass
8
 
9
  @dataclass
10
  class UserContext:
 
14
  similarity_threshold: float = 0.4 # FAISS L2 distance threshold for RAG relevance
15
 
16
 
 
 
 
 
 
 
 
 
 
 
 
 
17
  # ---------------------------------------------------------
18
  # RAG Search Tool
19
  # ---------------------------------------------------------
common/mcp/tools/search_tools.py CHANGED
@@ -1,15 +1,17 @@
1
  import requests
2
  from ddgs import DDGS
3
  from agents import function_tool
4
- from dotenv import load_dotenv
5
  from pydantic import BaseModel, Field
6
  from bs4 import BeautifulSoup
7
  from typing import Optional
8
 
 
 
9
  # ---------------------------------------------------------
10
  # Load environment variables
11
  # ---------------------------------------------------------
12
- load_dotenv()
13
 
14
  # ---------------------- MODELS ---------------------------
15
  class searchQuery(BaseModel):
@@ -118,8 +120,23 @@ def _duckduckgo_search(params: searchQuery) -> list[dict]:
118
  return results
119
 
120
  @function_tool
121
- def duckduckgo_search(params: searchQuery) -> list[dict]:
122
- """Perform a DuckDuckGo search and return only snippets.
123
- No page content fetched here."""
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
124
  return _duckduckgo_search(params)
125
 
 
1
  import requests
2
  from ddgs import DDGS
3
  from agents import function_tool
4
+
5
  from pydantic import BaseModel, Field
6
  from bs4 import BeautifulSoup
7
  from typing import Optional
8
 
9
+
10
+
11
  # ---------------------------------------------------------
12
  # Load environment variables
13
  # ---------------------------------------------------------
14
+
15
 
16
  # ---------------------- MODELS ---------------------------
17
  class searchQuery(BaseModel):
 
120
  return results
121
 
122
@function_tool
def duckduckgo_search(query: str, max_results: int = 5, search_type: str = "text", timelimit: str = "d", region: str = "us-en") -> list[dict]:
    """
    Run a DuckDuckGo search and return snippet-only results (no page fetch).

    Args:
        query: The search query string.
        max_results: Maximum number of results to return (default: 5).
        search_type: Either 'text' (default) or 'news'; 'news' includes
            publication dates.
        timelimit: Recency window: 'd' (day), 'w' (week), 'm' (month),
            'y' (year).
        region: Result region code such as 'us-en'.
    """
    # Bundle the plain arguments into the searchQuery model expected by the
    # internal search helper, then delegate to it.
    return _duckduckgo_search(
        searchQuery(
            query=query,
            max_results=max_results,
            search_type=search_type,
            timelimit=timelimit,
            region=region,
        )
    )
142
 
common/mcp/tools/time_tools.py CHANGED
@@ -2,6 +2,8 @@ from datetime import datetime
2
  from agents import function_tool
3
  # from ..common.utility.logger import log_call
4
 
 
 
5
  @function_tool
6
  # @log_call
7
  def current_datetime(format: str = "natural") -> str:
 
2
  from agents import function_tool
3
  # from ..common.utility.logger import log_call
4
 
5
+
6
+
7
  @function_tool
8
  # @log_call
9
  def current_datetime(format: str = "natural") -> str:
common/mcp/tools/weather_tools.py CHANGED
@@ -2,16 +2,18 @@ import os
2
  import re
3
  import requests
4
  import datetime
5
- from dotenv import load_dotenv
6
  from typing import Optional
7
 
8
  from ddgs import DDGS
9
  from agents import function_tool
10
 
 
 
11
  # ---------------------------------------------------------
12
  # Load environment variables
13
  # ---------------------------------------------------------
14
- load_dotenv()
15
 
16
  @function_tool
17
  def get_weather_forecast(city: str, date: Optional[str] = None) -> str:
 
2
  import re
3
  import requests
4
  import datetime
5
+
6
  from typing import Optional
7
 
8
  from ddgs import DDGS
9
  from agents import function_tool
10
 
11
+
12
+
13
  # ---------------------------------------------------------
14
  # Load environment variables
15
  # ---------------------------------------------------------
16
+
17
 
18
  @function_tool
19
  def get_weather_forecast(city: str, date: Optional[str] = None) -> str:
common/mcp/tools/yf_tools.py CHANGED
@@ -1,12 +1,14 @@
1
  import os
2
  import requests
3
  import yfinance as yf
4
- from dotenv import load_dotenv
5
  from agents import function_tool
6
  from datetime import datetime, timedelta
7
 
 
 
8
  # Load environment variables
9
- load_dotenv()
10
 
11
 
12
  # ============================================================
 
1
  import os
2
  import requests
3
  import yfinance as yf
4
+
5
  from agents import function_tool
6
  from datetime import datetime, timedelta
7
 
8
+
9
+
10
  # Load environment variables
11
+
12
 
13
 
14
  # ============================================================
common/rag/rag.py CHANGED
@@ -17,7 +17,7 @@ class Retriever:
17
  self.embeddings = HuggingFaceEmbeddings(model_name="sentence-transformers/all-MiniLM-L6-v2")
18
  self.text_splitter = RecursiveCharacterTextSplitter(
19
  chunk_size=1024,
20
- chunk_overlap=200,
21
  length_function=len,
22
  # separators=["\n\n", "\n", ".", "!", "?", ",", " ", ""],
23
  is_separator_regex=False,
 
17
  self.embeddings = HuggingFaceEmbeddings(model_name="sentence-transformers/all-MiniLM-L6-v2")
18
  self.text_splitter = RecursiveCharacterTextSplitter(
19
  chunk_size=1024,
20
+ chunk_overlap=300,
21
  length_function=len,
22
  # separators=["\n\n", "\n", ".", "!", "?", ",", " ", ""],
23
  is_separator_regex=False,
common/utility/autogen_model_factory.py ADDED
@@ -0,0 +1,88 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ from azure.identity import DefaultAzureCredential, get_bearer_token_provider
3
+
4
class AutoGenModelFactory:
    """
    Factory for creating AutoGen compatible model instances.

    Every supported provider is reached through AutoGen's
    ``OpenAIChatCompletionClient`` (an OpenAI-compatible API surface); only
    the base URL and credentials differ per backend.
    """

    @staticmethod
    def get_model(provider: str = "azure",  # azure, openai, google, groq, ollama
                  model_name: str = "gpt-4o",
                  temperature: float = 0,
                  model_info: dict = None
                  ):
        """
        Build and return an AutoGen ``OpenAIChatCompletionClient``.

        Parameters:
            provider: Backend to target ('azure', 'openai', 'google'/'gemini',
                'groq', or 'ollama'). Case-insensitive.
            model_name: Model (or Azure deployment) identifier.
            temperature: Sampling temperature forwarded to the client.
            model_info: Optional capability metadata (consumed by the
                Google/Gemini and Ollama backends).

        Raises:
            ImportError: If the AutoGen packages are not installed.
            ValueError: For an unrecognized provider.
        """
        # Lazy import to avoid dependency issues if autogen is not installed
        try:
            from autogen_ext.models.openai import OpenAIChatCompletionClient
        except ImportError as e:
            raise ImportError("AutoGen libraries (autogen-agentchat, autogen-ext[openai]) are not installed.") from e

        backend = provider.lower()

        if backend == "azure":
            # Azure AD bearer-token auth instead of a static API key.
            token_provider = get_bearer_token_provider(
                DefaultAzureCredential(), "https://cognitiveservices.azure.com/.default"
            )
            return OpenAIChatCompletionClient(
                model=model_name,
                azure_endpoint=os.environ["AZURE_OPENAI_API_URI"],
                api_version=os.environ["AZURE_OPENAI_API_VERSION"],
                azure_ad_token_provider=token_provider,
                temperature=temperature,
            )

        if backend == "openai":
            return OpenAIChatCompletionClient(
                model=model_name,
                api_key=os.environ["OPENAI_API_KEY"],
                temperature=temperature,
            )

        if backend in ("google", "gemini"):
            # Gemini served through its OpenAI-compatibility endpoint.
            return OpenAIChatCompletionClient(
                model=model_name,
                base_url="https://generativelanguage.googleapis.com/v1beta/openai/",
                api_key=os.environ["GOOGLE_API_KEY"],
                model_info=model_info,  # pass full model_info for capabilities
                temperature=temperature,
            )

        if backend == "groq":
            return OpenAIChatCompletionClient(
                model=model_name,
                base_url="https://api.groq.com/openai/v1",
                api_key=os.environ["GROQ_API_KEY"],
                temperature=temperature,
            )

        if backend == "ollama":
            # model_info is required downstream; fall back to an empty mapping.
            info = model_info if model_info is not None else {}
            return OpenAIChatCompletionClient(
                model=model_name,
                base_url="http://localhost:11434/v1",
                api_key="ollama",  # dummy key
                model_info=info,
                temperature=temperature,
            )

        raise ValueError(f"Unsupported AutoGen provider: {provider}")
common/utility/bkp/embedding_factory.py ADDED
@@ -0,0 +1,49 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ from typing import Union
3
+ # from azure.identity import DefaultAzureCredential
4
+ from langchain_openai import AzureOpenAIEmbeddings, OpenAIEmbeddings
5
+ from langchain_ollama import OllamaEmbeddings
6
+ from langchain_huggingface import HuggingFaceEmbeddings
7
+
8
+
9
class EmbeddingFactory:
    """
    A static utility class to create and return LLM Embedding instances based on the input type.
    """

    @staticmethod
    def get_llm(llm_type: str) -> Union[AzureOpenAIEmbeddings, OpenAIEmbeddings]:
        """
        Returns an embedding model instance based on the specified type.

        Parameters:
            llm_type (str): The embedding backend to return. Valid values:
                'azure' (currently disabled), 'openai', 'ollama', or 'hf'.
                Case-insensitive.

        Returns:
            The embedding model instance.

        Raises:
            NotImplementedError: If 'azure' is requested while Azure support
                is commented out.
            ValueError: For any unrecognized llm_type.
        """
        kind = llm_type.lower()
        if kind == "azure":
            # Azure support is intentionally disabled. It previously minted a
            # bearer token via DefaultAzureCredential:
            #   credential = DefaultAzureCredential()
            #   token = credential.get_token("https://cognitiveservices.azure.com/.default").token
            #   return AzureOpenAIEmbeddings(
            #       azure_endpoint=os.environ["AZURE_OPENAI_API_URI"],
            #       azure_deployment="text-embedding-3-small",
            #       api_version=os.environ["AZURE_OPENAI_API_VERSION"],
            #       api_key=token,
            #   )
            # BUG FIX: the branch used to fall through with `pass`, silently
            # returning None and surfacing later as a confusing AttributeError.
            raise NotImplementedError("Azure embeddings are currently disabled in EmbeddingFactory.")
        elif kind == "openai":
            return OpenAIEmbeddings(
                api_key=os.environ["OPENAI_API_KEY"],
                model="text-embedding-3-large"
            )
        elif kind == "ollama":  # must have ollama running locally with the following model
            return OllamaEmbeddings(model="gemma:2b")
        elif kind == "hf":  # must have key update in env:HF_TOKEN
            return HuggingFaceEmbeddings(model_name="all-MiniLM-L6-v2")
        else:
            # BUG FIX: the old message listed only 'azure' or 'openai' even
            # though 'ollama' and 'hf' are supported.
            raise ValueError("Invalid llm_type. Use 'azure', 'openai', 'ollama', or 'hf'.")
common/utility/bkp/llm_factory.py ADDED
@@ -0,0 +1,130 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import tiktoken
3
+ from typing import Any
4
+ from langchain_openai.chat_models import ChatOpenAI, AzureChatOpenAI
5
+ from langchain_openai.embeddings import AzureOpenAIEmbeddings, OpenAIEmbeddings
6
+ # from azure.identity import DefaultAzureCredential
7
+ from huggingface_hub import login
8
+ from langchain_huggingface import ChatHuggingFace, HuggingFaceEmbeddings
9
+ from langchain_ollama import ChatOllama, OllamaEmbeddings
10
+ from langchain_groq import ChatGroq
11
+ # from langchain_openai import OpenAIEmbeddings
12
+
13
+ class LLMFactory:
14
+ """
15
+ Factory class to provide LLM and embedding model instances for different providers.
16
+ """
17
+
18
+ @staticmethod
19
+ def get_llm(provider: str, **kwargs) -> Any:
20
+ """
21
+ Returns a chat/completion LLM instance based on the provider.
22
+ Supported providers: openai, azureopenai, huggingface, ollama, groq
23
+ """
24
+ if provider == "openai":
25
+ # OpenAI Chat Model
26
+ return ChatOpenAI(
27
+ openai_api_key=kwargs.get("api_key", os.environ.get("OPENAI_API_KEY")),
28
+ model_name=kwargs.get("model_name", "gpt-4")
29
+ )
30
+
31
+ # elif provider == "azureopenai":
32
+ # # Azure OpenAI Chat Model using Azure Identity for token
33
+ # credential = DefaultAzureCredential()
34
+ # token = credential.get_token("https://cognitiveservices.azure.com/.default").token
35
+ # if not token:
36
+ # raise ValueError("Token is required for AzureChatOpenAI.")
37
+ # return AzureChatOpenAI(
38
+ # azure_endpoint=kwargs["endpoint"],
39
+ # azure_deployment=kwargs.get("deployment_name", "gpt-4"),
40
+ # api_version=kwargs["api_version"],
41
+ # api_key=token
42
+ # )
43
+
44
+ # pip install langchain langchain-huggingface huggingface_hub
45
+ elif provider == "huggingface":
46
+ # If using a private model or endpoint, authenticate
47
+ login(token=kwargs.get("api_key", os.environ.get("HF_TOKEN")))
48
+
49
+ return ChatHuggingFace(
50
+ repo_id=kwargs.get("model_name", "mistralai/Mistral-Nemo-Instruct-2407"), # Or any other chat-friendly model
51
+ task="text-generation",
52
+ model_kwargs={
53
+ "temperature": 0.7,
54
+ "max_new_tokens": 256
55
+ }
56
+ )
57
+
58
+ elif provider == "ollama":
59
+ # Ollama local model
60
+ return ChatOllama(
61
+ model=kwargs.get("model_name", "gemma:2b"),
62
+ temperature=0
63
+ )
64
+
65
+ elif provider == "groq":
66
+ # Groq LLM
67
+ return ChatGroq(
68
+ model=kwargs.get("model_name", "Gemma2-9b-It"),
69
+ max_tokens=512,
70
+ api_key=kwargs.get("api_key", os.environ.get("GROQ_API_KEY"))
71
+ )
72
+
73
+ else:
74
+ raise ValueError(f"Unsupported provider: {provider}")
75
+
76
+ @staticmethod
77
+ def get_embedding_model(provider: str, **kwargs) -> Any:
78
+ """
79
+ Returns an embedding model instance based on the provider.
80
+ Supported providers: openai, huggingface
81
+ """
82
+ if provider == "openai":
83
+ return OpenAIEmbeddings(
84
+ model=kwargs.get("model_name", "text-embedding-3-large"),
85
+ openai_api_key=kwargs.get("api_key", os.environ.get("OPENAI_API_KEY"))
86
+ )
87
+ # if provider == "azureopenai":
88
+ # # Get the Azure Credential
89
+ # credential = DefaultAzureCredential()
90
+ # token=credential.get_token("https://cognitiveservices.azure.com/.default").token
91
+
92
+ # if not token:
93
+ # raise ValueError("Token is required for AzureOpenAIEmbeddings.")
94
+ # return AzureOpenAIEmbeddings(
95
+ # azure_endpoint=os.environ["AZURE_OPENAI_API_URI"],
96
+ # azure_deployment=kwargs.get("azure_deployment", "text-embedding-3-large"),
97
+ # api_version=os.environ["AZURE_OPENAI_API_VERSION"],
98
+ # api_key=token
99
+ # )
100
+ elif provider == "huggingface":
101
+ # If using a private model or endpoint, authenticate
102
+ login(token=kwargs.get("api_key", os.environ.get("HF_TOKEN")))
103
+
104
+ return HuggingFaceEmbeddings(
105
+ model_name=kwargs.get("model_name", "all-MiniLM-L6-v2")
106
+ )
107
+ elif provider == "groq":
108
+ raise ValueError(f"No embedding support from the provider: {provider}")
109
+ elif provider == "ollama":
110
+ return OllamaEmbeddings(model=kwargs.get("model_name", "gemma:2b"))
111
+ else:
112
+ raise ValueError(f"Unsupported embedding provider: {provider}")
113
+
114
+ @staticmethod
115
+ def num_tokens_from_messages(messages) -> int:
116
+ """
117
+ Return the number of tokens used by a list of messages.
118
+ Adapted from the OpenAI cookbook token counter.
119
+ """
120
+ encoding = tiktoken.encoding_for_model("gpt-3.5-turbo")
121
+ tokens_per_message = 3 # <|start|>, role, <|end|>
122
+ num_tokens = 0
123
+
124
+ for message in messages:
125
+ num_tokens += tokens_per_message
126
+ for key, value in message.items():
127
+ num_tokens += len(encoding.encode(value))
128
+
129
+ num_tokens += 3 # every reply is primed with <|start|>assistant<|message|>
130
+ return num_tokens
common/utility/bkp/llm_factory2.py ADDED
@@ -0,0 +1,75 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import tiktoken
3
+ from typing import Union
4
+ # from azure.identity import DefaultAzureCredential
5
+ from langchain_openai.chat_models import AzureChatOpenAI, ChatOpenAI
6
+
7
+
8
+ class LLMFactory:
9
+ """
10
+ A static utility class to create and return LLM instances based on the input type.
11
+ """
12
+
13
+ @staticmethod
14
+ def get_llm(llm_type: str) -> Union[AzureChatOpenAI, ChatOpenAI]:
15
+ """
16
+ Returns an LLM instance based on the specified type.
17
+
18
+ Parameters:
19
+ llm_type (str): The type of LLM to return. Valid values are 'azure' or 'openai'.
20
+
21
+ Returns:
22
+ Union[AzureChatOpenAI, ChatOpenAI]: The LLM instance.
23
+ """
24
+ if llm_type.lower() == "azure":
25
+ # # Get the Azure Credential
26
+ # credential = DefaultAzureCredential()
27
+ # token=credential.get_token("https://cognitiveservices.azure.com/.default").token
28
+
29
+ # if not token:
30
+ # raise ValueError("Token is required for AzureChatOpenAI.")
31
+ # return AzureChatOpenAI(
32
+ # azure_endpoint=os.environ["AZURE_OPENAI_API_URI"],
33
+ # azure_deployment=os.environ["AZURE_OPENAI_API_BASE_MODEL"],
34
+ # api_version=os.environ["AZURE_OPENAI_API_VERSION"],
35
+ # api_key=token
36
+ # )
37
+ pass
38
+ elif llm_type.lower() == "openai":
39
+ return ChatOpenAI(
40
+ api_key=os.environ["OPENAI_API_KEY"],
41
+ model_name="gpt-4"
42
+ )
43
+ elif llm_type.lower() == "openai_chat":
44
+ return ChatOpenAI(
45
+ api_key=os.environ["OPENAI_API_KEY"],
46
+ model_name="gpt-4"
47
+ )
48
+ else:
49
+ raise ValueError("Invalid llm_type. Use 'azure' or 'openai'.")
50
+
51
+ @staticmethod
52
+ def num_tokens_from_messages(messages):
53
+
54
+ """
55
+ Return the number of tokens used by a list of messages.
56
+ Adapted from the Open AI cookbook token counter
57
+ """
58
+
59
+ encoding = tiktoken.encoding_for_model("gpt-3.5-turbo")
60
+
61
+ # Each message is sandwiched with <|start|>role and <|end|>
62
+ # Hence, messages look like: <|start|>system or user or assistant{message}<|end|>
63
+
64
+ tokens_per_message = 3 # token1:<|start|>, token2:system(or user or assistant), token3:<|end|>
65
+
66
+ num_tokens = 0
67
+
68
+ for message in messages:
69
+ num_tokens += tokens_per_message
70
+ for key, value in message.items():
71
+ num_tokens += len(encoding.encode(value))
72
+
73
+ num_tokens += 3 # every reply is primed with <|start|>assistant<|message|>
74
+
75
+ return num_tokens
common/utility/langchain_model_factory.py ADDED
@@ -0,0 +1,70 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ from azure.identity import DefaultAzureCredential, get_bearer_token_provider
3
+ from langchain_openai import AzureChatOpenAI, ChatOpenAI
4
+ from langchain_huggingface import ChatHuggingFace, HuggingFaceEndpoint
5
+ from langchain_ollama import ChatOllama
6
+ from huggingface_hub import login
7
+
8
class LangChainModelFactory:
    """
    Factory for creating LangChain compatible model instances.
    """

    @staticmethod
    def get_model(provider: str = "openai",  # openai, azure, huggingface, ollama
                  model_name: str = "gpt-4o",
                  temperature: float = 0
                  ):
        """
        Build and return a LangChain chat-model instance.

        Parameters:
            provider: 'azure', 'openai', 'huggingface', or 'ollama'
                (case-insensitive).
            model_name: Model identifier (a Hugging Face repo id for
                'huggingface').
            temperature: Sampling temperature.

        Raises:
            ValueError: For an unrecognized provider.
        """
        backend = provider.lower()

        if backend == "azure":
            # Azure AD bearer-token auth instead of a static API key.
            token_provider = get_bearer_token_provider(
                DefaultAzureCredential(), "https://cognitiveservices.azure.com/.default"
            )
            return AzureChatOpenAI(
                azure_endpoint=os.environ["AZURE_OPENAI_API_URI"],
                azure_deployment=os.environ["AZURE_OPENAI_API_BASE_MODEL"],  # Or specific model_name if deployment matches
                api_version=os.environ["AZURE_OPENAI_API_VERSION"],
                azure_ad_token_provider=token_provider,
                model_name=model_name,
                temperature=temperature,
            )

        if backend == "openai":
            return ChatOpenAI(
                api_key=os.environ["OPENAI_API_KEY"],
                model_name=model_name,
                temperature=temperature,
            )

        if backend == "huggingface":
            # Authenticate first when a token is available (private repos).
            if os.environ.get("HF_TOKEN"):
                login(token=os.environ.get("HF_TOKEN"))
            endpoint = HuggingFaceEndpoint(
                repo_id=model_name,
                task="text-generation",
                temperature=temperature,
                max_new_tokens=512,
                huggingfacehub_api_token=os.environ.get("HF_TOKEN")
            )
            return ChatHuggingFace(llm=endpoint)

        if backend == "ollama":
            return ChatOllama(model=model_name, temperature=temperature)

        raise ValueError(f"Unsupported LangChain provider: {provider}")
common/utility/logger.py CHANGED
@@ -14,7 +14,7 @@ def log_call(func):
14
  print(f"[{timestamp}] 🚀 Calling: {func.__name__}({arg_list})")
15
  try:
16
  result = func(*args, **kwargs)
17
- print(f"[{timestamp}] ✅ Finished: {func.__name__}")
18
  return result
19
  except Exception as e:
20
  print(f"[{timestamp}] ❌ Error in {func.__name__}: {e}")
 
14
  print(f"[{timestamp}] 🚀 Calling: {func.__name__}({arg_list})")
15
  try:
16
  result = func(*args, **kwargs)
17
+ # print(f"[{timestamp}] ✅ Finished: {func.__name__}")
18
  return result
19
  except Exception as e:
20
  print(f"[{timestamp}] ❌ Error in {func.__name__}: {e}")
common/utility/model_factory_notused.py ADDED
@@ -0,0 +1,302 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import tiktoken
3
+ from typing import Union
4
+ from azure.identity import DefaultAzureCredential, get_bearer_token_provider
5
+ from langchain_openai import AzureChatOpenAI, ChatOpenAI, AzureOpenAIEmbeddings, OpenAIEmbeddings
6
+ from agents import OpenAIChatCompletionsModel
7
+ from openai import AsyncOpenAI, AsyncAzureOpenAI
8
+ from huggingface_hub import login
9
+ from langchain_huggingface import ChatHuggingFace, HuggingFaceEndpoint, HuggingFaceEmbeddings
10
+ from langchain_ollama import ChatOllama, OllamaEmbeddings
11
+
12
+
13
class ModelFactory:
    """
    A static utility class that builds chat-model instances for several agent
    frameworks (AutoGen, LangChain, and the OpenAI Agents SDK) across multiple
    providers.
    """

    @staticmethod
    def get_model(framework: str = "openai-sdk-agent",  # openai-sdk-agent, langchain, autogen
                  provider: str = "openai",             # openai, azure, google, groq, huggingface, ollama
                  model_name: str = "gpt-4o-mini",      # gpt-4o-mini, gemini-flash-1.5, groq/compound
                  model_info: dict = None,              # additional info (e.g. backend provider for autogen/langchain)
                  temperature: float = 0
                  ) -> Union[AzureChatOpenAI, ChatOpenAI, OpenAIChatCompletionsModel, ChatHuggingFace, ChatOllama]:
        """
        Returns an LLM instance based on the specified parameters.

        Parameters:
            framework (str): 'langchain', 'openai-sdk-agent' (aliases
                'openai-sdk', 'openai'), or 'autogen'.
            provider (str): 'openai', 'azure', 'google', 'groq',
                'huggingface', or 'ollama'.
            model_name (str): The specific model name.
            model_info (dict): Additional model info (capability metadata for
                the Google/Ollama AutoGen backends).
            temperature (float): The temperature for generation (default 0).

        Raises:
            ImportError: When framework='autogen' but AutoGen is missing.
            ValueError: For unsupported framework/provider combinations.
        """
        fw = framework.lower()
        backend = provider.lower()

        # ------------------------------------------------------------------
        # AUTOGEN SUPPORT
        # ------------------------------------------------------------------
        if fw == "autogen":
            # Lazy import so the rest of the factory works without autogen.
            try:
                from autogen_ext.models.openai import OpenAIChatCompletionClient
            except ImportError as e:
                raise ImportError("AutoGen libraries (autogen-agentchat, autogen-ext[openai]) are not installed.") from e

            if backend == "azure":
                token_provider = get_bearer_token_provider(
                    DefaultAzureCredential(), "https://cognitiveservices.azure.com/.default"
                )
                return OpenAIChatCompletionClient(
                    model=model_name,
                    azure_endpoint=os.environ["AZURE_OPENAI_API_URI"],
                    api_version=os.environ["AZURE_OPENAI_API_VERSION"],
                    azure_ad_token_provider=token_provider,
                    temperature=temperature,
                )
            if backend == "openai":
                return OpenAIChatCompletionClient(
                    model=model_name,
                    api_key=os.environ["OPENAI_API_KEY"],
                    temperature=temperature,
                )
            if backend in ("google", "gemini"):
                # Gemini served through its OpenAI-compatibility endpoint.
                return OpenAIChatCompletionClient(
                    model=model_name,
                    base_url="https://generativelanguage.googleapis.com/v1beta/openai/",
                    api_key=os.environ["GOOGLE_API_KEY"],
                    model_info=model_info,  # pass capability metadata through
                    temperature=temperature,
                )
            if backend == "groq":
                return OpenAIChatCompletionClient(
                    model=model_name,
                    base_url="https://api.groq.com/openai/v1",
                    api_key=os.environ["GROQ_API_KEY"],
                    temperature=temperature,
                )
            if backend == "ollama":
                return OpenAIChatCompletionClient(
                    model=model_name,
                    base_url="http://localhost:11434/v1",
                    api_key="ollama",  # dummy key
                    model_info=model_info if model_info is not None else {},
                    temperature=temperature,
                )
            raise ValueError(f"Unsupported AutoGen provider: {provider}")

        # ------------------------------------------------------------------
        # LANGCHAIN SUPPORT
        # ------------------------------------------------------------------
        if fw == "langchain":
            if backend == "azure":
                token_provider = get_bearer_token_provider(
                    DefaultAzureCredential(), "https://cognitiveservices.azure.com/.default"
                )
                return AzureChatOpenAI(
                    azure_endpoint=os.environ["AZURE_OPENAI_API_URI"],
                    azure_deployment=os.environ["AZURE_OPENAI_API_BASE_MODEL"],
                    api_version=os.environ["AZURE_OPENAI_API_VERSION"],
                    azure_ad_token_provider=token_provider,
                    model_name=model_name,
                    temperature=temperature,
                )
            if backend == "openai":
                return ChatOpenAI(
                    api_key=os.environ["OPENAI_API_KEY"],
                    model_name=model_name,
                    temperature=temperature,
                )
            if backend == "huggingface":
                if os.environ.get("HF_TOKEN"):
                    login(token=os.environ.get("HF_TOKEN"))
                endpoint = HuggingFaceEndpoint(
                    repo_id=model_name,
                    task="text-generation",
                    temperature=temperature,
                    max_new_tokens=512,
                    huggingfacehub_api_token=os.environ.get("HF_TOKEN")
                )
                return ChatHuggingFace(llm=endpoint)
            if backend == "ollama":
                return ChatOllama(model=model_name, temperature=temperature)
            raise ValueError(f"Unsupported LangChain provider: {provider}")

        # ------------------------------------------------------------------
        # STANDARD LOGIC (Agents Lib / OpenAI SDK)
        # ------------------------------------------------------------------
        if fw in ("openai-sdk-agent", "openai-sdk", "openai"):
            if backend == "azure":
                token_provider = get_bearer_token_provider(
                    DefaultAzureCredential(), "https://cognitiveservices.azure.com/.default"
                )
                client = AsyncAzureOpenAI(
                    azure_endpoint=os.environ["AZURE_OPENAI_API_URI"],
                    api_version=os.environ["AZURE_OPENAI_API_VERSION"],
                    azure_ad_token_provider=token_provider,
                )
            elif backend == "openai":
                client = AsyncOpenAI(api_key=os.environ["OPENAI_API_KEY"])
            elif backend == "google":
                client = AsyncOpenAI(
                    base_url="https://generativelanguage.googleapis.com/v1beta/openai/",
                    api_key=os.environ["GOOGLE_API_KEY"]
                )
            elif backend == "groq":
                client = AsyncOpenAI(
                    base_url="https://api.groq.com/openai/v1",
                    api_key=os.environ["GROQ_API_KEY"]
                )
            elif backend == "ollama":
                client = AsyncOpenAI(
                    base_url="http://localhost:11434/v1",
                    api_key="ollama"
                )
            elif backend == "huggingface":
                # Agents lib doesn't have native HF support in the same way
                raise ValueError("For Hugging Face, please use framework='langchain'")
            else:
                raise ValueError(f"Unsupported provider for openai-sdk-agent: {provider}")
            # All SDK providers share the same wrapper type.
            return OpenAIChatCompletionsModel(model=model_name, openai_client=client)

        raise ValueError(f"Unsupported framework: {framework}")

    @staticmethod
    def num_tokens_from_messages(messages, model: str = "gpt-4o"):
        """
        Return the number of tokens used by a list of chat messages.

        Falls back to the cl100k_base encoding for unknown model names.
        Multimodal 'content' lists count their text parts; images use a fixed
        85-token estimate.
        """
        try:
            encoding = tiktoken.encoding_for_model(model)
        except KeyError:
            encoding = tiktoken.get_encoding("cl100k_base")

        per_message = 3  # <|start|>role ... <|end|> framing tokens
        total = 0

        for message in messages:
            total += per_message
            for key, value in message.items():
                if key == "name":
                    total += 1

                if isinstance(value, str):
                    total += len(encoding.encode(value))
                elif key == "content" and isinstance(value, list):
                    for part in value:
                        if isinstance(part, dict) and part.get("type") == "text":
                            total += len(encoding.encode(part.get("text", "")))
                        elif isinstance(part, dict) and part.get("type") == "image_url":
                            total += 85

        total += 3  # every reply is primed with <|start|>assistant<|message|>
        return total
232
+
233
+
234
class EmbeddingFactory:
    """
    A static utility class to create and return Embedding Model instances.
    """

    @staticmethod
    def get_embedding_model(provider: str = "openai",
                            model_name: str = "text-embedding-3-small"
                            ) -> Union[AzureOpenAIEmbeddings, OpenAIEmbeddings, OllamaEmbeddings, HuggingFaceEmbeddings]:
        """
        Build an embedding model for the requested provider.

        Supported providers (case-insensitive): azure, openai, ollama,
        huggingface. Raises ValueError for anything else.
        """
        kind = provider.lower()

        if kind == "azure":
            # Azure AD bearer-token auth; deployment may be overridden via
            # the AZURE_OPENAI_EMBEDDING_DEPLOYMENT environment variable.
            token_provider = get_bearer_token_provider(
                DefaultAzureCredential(), "https://cognitiveservices.azure.com/.default"
            )
            return AzureOpenAIEmbeddings(
                azure_endpoint=os.environ["AZURE_OPENAI_API_URI"],
                azure_deployment=os.environ.get("AZURE_OPENAI_EMBEDDING_DEPLOYMENT", model_name),
                api_version=os.environ["AZURE_OPENAI_API_VERSION"],
                azure_ad_token_provider=token_provider,
            )

        if kind == "openai":
            return OpenAIEmbeddings(
                api_key=os.environ["OPENAI_API_KEY"],
                model=model_name
            )

        if kind == "ollama":
            return OllamaEmbeddings(model=model_name)

        if kind == "huggingface":
            # Authenticate first when a token is available (private repos).
            if os.environ.get("HF_TOKEN"):
                login(token=os.environ.get("HF_TOKEN"))
            return HuggingFaceEmbeddings(model_name=model_name)

        raise ValueError(f"Unsupported embedding provider: {provider}")
267
+
268
+ # =================================================================================================
269
+ # GLOBAL HELPER FUNCTIONS (for agents)
270
+ # =================================================================================================
271
+
272
+ # model used for orchestrator or executor
273
+ # def get_model(provider:str = "google", framework:str = "openai-sdk", model_name:str = "gemini-2.5-flash"):
274
# model used for orchestrator or executor
def get_model(provider: str = "openai", framework: str = "openai", model_name: str = "gpt-4-turbo"):
    """
    Build the model used for an orchestrator or executor agent.

    Parameters:
        provider (str): Model provider ('openai', 'google'/'gemini', 'groq', ...).
        framework (str): Target framework understood by ModelFactory.get_model.
        model_name (str): Concrete model identifier.

    Returns:
        The model instance produced by ModelFactory.get_model.
    """
    model_info = None
    # BUG FIX: the original tested membership with ``provider in
    # list["gemini", "google"]``, which subscripts the built-in ``list`` type
    # (producing a types.GenericAlias) and raises TypeError at runtime.
    # A plain tuple is the intended container.
    if provider in ("gemini", "google"):
        # Gemini is served through an OpenAI-compatible shim, so downstream
        # clients need explicit capability metadata.
        model_info = {
            "family": "gemini",
            "vision": True,
            "function_calling": True,
            "json_output": True,
            "structured_output": True,
        }

    return ModelFactory.get_model(framework=framework,
                                  provider=provider,
                                  model_name=model_name,
                                  model_info=model_info,
                                  temperature=0)
296
+
297
# Use this model where agent executing tool and returning JSON
def get_model_json(model_name: str = "gpt-4.1-mini", provider: str = "openai"):
    """Return a deterministic (temperature 0) SDK model for agents that run tools and emit JSON."""
    return ModelFactory.get_model(
        framework="openai-sdk",
        provider=provider,
        model_name=model_name,
        temperature=0,
    )
common/utility/openai_model_factory.py ADDED
@@ -0,0 +1,179 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import tiktoken
3
+ from typing import Union
4
+ from azure.identity import DefaultAzureCredential, get_bearer_token_provider
5
+ from agents import OpenAIChatCompletionsModel
6
+ from openai import AsyncOpenAI, AsyncAzureOpenAI
7
+ from langchain_openai import AzureOpenAIEmbeddings, OpenAIEmbeddings
8
+ from langchain_huggingface import HuggingFaceEmbeddings
9
+ from langchain_ollama import OllamaEmbeddings
10
+ from huggingface_hub import login
11
+
12
+ class OpenAIModelFactory:
13
+ """
14
+ Factory for creating OpenAI-SDK compatible model instances (using the 'agents' library).
15
+ Supports multiple providers via the OpenAI-compatible API format.
16
+ """
17
+
18
+ @staticmethod
19
+ def get_model(provider: str = "openai", # openai, azure, google, groq, ollama
20
+ model_name: str = "gpt-4o",
21
+ temperature: float = 0
22
+ ) -> OpenAIChatCompletionsModel:
23
+ """
24
+ Returns an OpenAIChatCompletionsModel instance.
25
+ """
26
+
27
+ # ----------------------------------------------------------------------
28
+ # AZURE OPENAI
29
+ # ----------------------------------------------------------------------
30
+ if provider.lower() == "azure":
31
+ token_provider = get_bearer_token_provider(
32
+ DefaultAzureCredential(), "https://cognitiveservices.azure.com/.default"
33
+ )
34
+ client = AsyncAzureOpenAI(
35
+ azure_endpoint=os.environ["AZURE_OPENAI_API_URI"],
36
+ api_version=os.environ["AZURE_OPENAI_API_VERSION"],
37
+ azure_ad_token_provider=token_provider,
38
+ )
39
+ return OpenAIChatCompletionsModel(model=model_name, openai_client=client)
40
+
41
+ # ----------------------------------------------------------------------
42
+ # STANDARD OPENAI
43
+ # ----------------------------------------------------------------------
44
+ elif provider.lower() == "openai":
45
+ client = AsyncOpenAI(api_key=os.environ["OPENAI_API_KEY"])
46
+ return OpenAIChatCompletionsModel(model=model_name, openai_client=client)
47
+
48
+ # ----------------------------------------------------------------------
49
+ # GOOGLE (GEMINI) via OpenAI Compat
50
+ # ----------------------------------------------------------------------
51
+ elif provider.lower() == "google" or provider.lower() == "gemini":
52
+ GEMINI_BASE_URL = "https://generativelanguage.googleapis.com/v1beta/openai/"
53
+ client = AsyncOpenAI(
54
+ base_url=GEMINI_BASE_URL,
55
+ api_key=os.environ["GOOGLE_API_KEY"]
56
+ )
57
+ return OpenAIChatCompletionsModel(model=model_name, openai_client=client)
58
+
59
+ # ----------------------------------------------------------------------
60
+ # GROQ via OpenAI Compat
61
+ # ----------------------------------------------------------------------
62
+ elif provider.lower() == "groq":
63
+ GROQ_BASE_URL = "https://api.groq.com/openai/v1"
64
+ client = AsyncOpenAI(
65
+ base_url=GROQ_BASE_URL,
66
+ api_key=os.environ["GROQ_API_KEY"]
67
+ )
68
+ return OpenAIChatCompletionsModel(model=model_name, openai_client=client)
69
+
70
+ # ----------------------------------------------------------------------
71
+ # OLLAMA via OpenAI Compat
72
+ # ----------------------------------------------------------------------
73
+ elif provider.lower() == "ollama":
74
+ client = AsyncOpenAI(
75
+ base_url="http://localhost:11434/v1",
76
+ api_key="ollama"
77
+ )
78
+ return OpenAIChatCompletionsModel(model=model_name, openai_client=client)
79
+
80
+ # ----------------------------------------------------------------------
81
+ # UNSUPPORTED
82
+ # ----------------------------------------------------------------------
83
+ else:
84
+ raise ValueError(f"Unsupported provider for OpenAIModelFactory: {provider}")
85
+
86
+
87
+ @staticmethod
88
+ def num_tokens_from_messages(messages, model: str = "gpt-4o"):
89
+ """
90
+ Return the number of tokens used by a list of messages.
91
+ """
92
+ try:
93
+ encoding = tiktoken.encoding_for_model(model)
94
+ except KeyError:
95
+ encoding = tiktoken.get_encoding("cl100k_base")
96
+
97
+ tokens_per_message = 3
98
+ num_tokens = 0
99
+
100
+ for message in messages:
101
+ num_tokens += tokens_per_message
102
+ for key, value in message.items():
103
+ if key == "name":
104
+ num_tokens += 1
105
+
106
+ # Encode values if they are strings
107
+ if isinstance(value, str):
108
+ num_tokens += len(encoding.encode(value))
109
+ elif isinstance(value, list) and key == "content":
110
+ for part in value:
111
+ if isinstance(part, dict) and part.get("type") == "text":
112
+ num_tokens += len(encoding.encode(part.get("text", "")))
113
+ elif isinstance(part, dict) and part.get("type") == "image_url":
114
+ num_tokens += 85
115
+
116
+ num_tokens += 3
117
+ return num_tokens
118
+
119
+
120
+ class EmbeddingFactory:
121
+ """
122
+ A static utility class to create and return Embedding Model instances.
123
+ """
124
+
125
+ @staticmethod
126
+ def get_embedding_model(provider: str = "openai",
127
+ model_name: str = "text-embedding-3-small"
128
+ ) -> Union[AzureOpenAIEmbeddings, OpenAIEmbeddings, OllamaEmbeddings, HuggingFaceEmbeddings]:
129
+
130
+ if provider.lower() == "azure":
131
+ token_provider = get_bearer_token_provider(
132
+ DefaultAzureCredential(), "https://cognitiveservices.azure.com/.default"
133
+ )
134
+ return AzureOpenAIEmbeddings(
135
+ azure_endpoint=os.environ["AZURE_OPENAI_API_URI"],
136
+ azure_deployment=os.environ.get("AZURE_OPENAI_EMBEDDING_DEPLOYMENT", model_name),
137
+ api_version=os.environ["AZURE_OPENAI_API_VERSION"],
138
+ azure_ad_token_provider=token_provider,
139
+ )
140
+ elif provider.lower() == "openai":
141
+ return OpenAIEmbeddings(
142
+ api_key=os.environ["OPENAI_API_KEY"],
143
+ model=model_name
144
+ )
145
+ elif provider.lower() == "ollama":
146
+ return OllamaEmbeddings(model=model_name)
147
+ elif provider.lower() == "huggingface":
148
+ if os.environ.get("HF_TOKEN"):
149
+ login(token=os.environ.get("HF_TOKEN"))
150
+ return HuggingFaceEmbeddings(model_name=model_name)
151
+ else:
152
+ raise ValueError(f"Unsupported embedding provider: {provider}")
153
+
154
+
155
+ # =================================================================================================
156
+ # GLOBAL HELPER FUNCTIONS
157
+ # =================================================================================================
158
+
159
+ def get_model(provider:str = "openai", model_name:str = "gpt-4o"):
160
+ """
161
+ Global helper to get an OpenAI-SDK compatible model.
162
+ Defaults to OpenAI provider and gpt-4o.
163
+ """
164
+ return OpenAIModelFactory.get_model(
165
+ provider=provider,
166
+ model_name=model_name,
167
+ temperature=0
168
+ )
169
+
170
+ def get_model_json(model_name: str = "gpt-4o-2024-08-06", provider: str = "openai"):
171
+ """
172
+ Global helper to get a JSON-capable model (Structured Outputs).
173
+ Defaults to gpt-4o-2024-08-06 on OpenAI.
174
+ """
175
+ return OpenAIModelFactory.get_model(
176
+ provider=provider,
177
+ model_name=model_name,
178
+ temperature=0
179
+ )
pyproject.toml CHANGED
@@ -31,7 +31,7 @@ dependencies = [
31
  "langchain-text-splitters>=1.0.0",
32
  "langchain-chroma>=1.0.0",
33
  "html2text>=2025.4.15",
34
- "langfuse>=2.0.0",
35
 
36
  # =======================
37
  # VECTOR DB / INDEXING
@@ -117,12 +117,13 @@ dependencies = [
117
  # =======================
118
  "ddgs>=9.9.2",
119
  "duckduckgo_search",
 
120
 
121
  # =======================
122
  # OBSERVABILITY
123
  # =======================
124
  "openinference-instrumentation-autogen>=0.1.0",
125
- "openinference-instrumentation-openai>=0.1.0",
126
  "opentelemetry-sdk>=1.20.0",
127
  "opentelemetry-exporter-otlp>=1.20.0",
128
  "opentelemetry-api>=1.20.0",
 
31
  "langchain-text-splitters>=1.0.0",
32
  "langchain-chroma>=1.0.0",
33
  "html2text>=2025.4.15",
34
+ "traceloop-sdk>=0.33.0",
35
 
36
  # =======================
37
  # VECTOR DB / INDEXING
 
117
  # =======================
118
  "ddgs>=9.9.2",
119
  "duckduckgo_search",
120
+ "azure-identity>=1.25.1",
121
 
122
  # =======================
123
  # OBSERVABILITY
124
  # =======================
125
  "openinference-instrumentation-autogen>=0.1.0",
126
+ "openinference-instrumentation-openai>=0.1.15",
127
  "opentelemetry-sdk>=1.20.0",
128
  "opentelemetry-exporter-otlp>=1.20.0",
129
  "opentelemetry-api>=1.20.0",
run.py CHANGED
@@ -18,7 +18,13 @@ import subprocess
18
  import argparse
19
  from pathlib import Path
20
  from typing import Dict, Optional
 
 
 
21
 
 
 
 
22
 
23
  # App registry - maps app names to their paths and entry points
24
  APP_REGISTRY: Dict[str, Dict[str, str]] = {
@@ -168,7 +174,6 @@ def launch_app(app_name: str, port: Optional[int] = None):
168
  print(f"\n❌ Error launching app: {e}")
169
  sys.exit(1)
170
 
171
-
172
  def main():
173
  """Main entry point."""
174
  parser = argparse.ArgumentParser(
@@ -222,4 +227,5 @@ Available Apps:
222
 
223
 
224
  if __name__ == "__main__":
 
225
  main()
 
18
  import argparse
19
  from pathlib import Path
20
  from typing import Dict, Optional
21
+ from agents import Runner, SQLiteSession
22
+ # from agents import set_trace_processors
23
+ # from langsmith.wrappers import OpenAIAgentsTracingProcessor
24
 
25
+ # Load environment variables explicitly
26
+ from dotenv import load_dotenv
27
+ load_dotenv(override=True)
28
 
29
  # App registry - maps app names to their paths and entry points
30
  APP_REGISTRY: Dict[str, Dict[str, str]] = {
 
174
  print(f"\n❌ Error launching app: {e}")
175
  sys.exit(1)
176
 
 
177
  def main():
178
  """Main entry point."""
179
  parser = argparse.ArgumentParser(
 
227
 
228
 
229
  if __name__ == "__main__":
230
+ # set_trace_processors([OpenAIAgentsTracingProcessor()])
231
  main()
src/deep-research/app.py CHANGED
@@ -5,28 +5,12 @@ import html
5
  from io import BytesIO
6
  import sys
7
  import os
8
-
9
- # LangSmith Configuration (Overwrites)
10
- os.environ["LANGCHAIN_TRACING_V2"] = "true"
11
- os.environ["LANGCHAIN_PROJECT"] = "deep-research"
12
-
13
  from pathlib import Path
14
-
15
- # Add project root
16
- # sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), ".")))
17
-
18
- from dotenv import load_dotenv
19
  from reportlab.platypus import SimpleDocTemplate, Paragraph
20
  from reportlab.lib.styles import getSampleStyleSheet
21
  from appagents.orchestrator import Orchestrator
22
  from agents import SQLiteSession
23
 
24
- # ------------------------------------------------------------------------------
25
- # OpenTelemetry Setup (Removed)
26
- # ------------------------------------------------------------------------------
27
-
28
-
29
- load_dotenv(override=True)
30
 
31
  # --------------------
32
  # Page config
@@ -82,7 +66,7 @@ st.markdown("""
82
  }
83
 
84
  .hero-container {
85
- margin-top: 0;
86
  margin-left: -5rem;
87
  margin-right: -5rem;
88
  padding: 2.5rem 1rem 2rem 1rem; /* Compact desktop padding */
@@ -98,7 +82,7 @@ st.markdown("""
98
  }
99
 
100
  .hero-container {
101
- margin-top: 0;
102
  margin-left: -1rem;
103
  margin-right: -1rem;
104
  padding: 2rem 1rem 1.5rem 1rem; /* Compact mobile padding */
@@ -172,8 +156,8 @@ st.markdown("""
172
  color: #333;
173
  }
174
 
175
- /* Buttons */
176
- .stButton button {
177
  border-radius: 20px; /* Matching healthcare */
178
  min-height: 48px;
179
  font-weight: 500;
@@ -245,7 +229,7 @@ def make_pdf_bytes(text: str) -> bytes:
245
  # --------------------
246
  # Logic
247
  # --------------------
248
- async def run_research(query: str):
249
  session_id = st.session_state.session_id
250
  session = SQLiteSession(f"session_{session_id}.db")
251
  orchestrator = Orchestrator(session=session)
@@ -254,7 +238,7 @@ async def run_research(query: str):
254
  status_container = st.status("🔍 Researching...", expanded=True)
255
 
256
  try:
257
- async for chunk in orchestrator.run(query):
258
  # Filtering heuristic: Orchestrator yields status messages then the final report.
259
  # Status messages are short and specific.
260
  if (chunk.startswith("View trace") or
@@ -286,7 +270,7 @@ async def run_research(query: str):
286
  st.markdown("""
287
  <div class="hero-container">
288
  <div class="hero-title">🧠 Deep Research</div>
289
- <div class="hero-subtitle">OpenAI Agentic Research Assistant</div>
290
  </div>
291
  """, unsafe_allow_html=True)
292
 
@@ -294,7 +278,7 @@ st.markdown("""
294
  with st.sidebar:
295
  st.header("⚙️ Configuration")
296
  research_depth = st.select_slider("Research Depth", options=["Quick", "Standard", "Deep"], value="Standard")
297
- report_format = st.selectbox("Report Format", ["Academic", "Business", "Creative"])
298
  st.caption("Settings affect the tone and depth of the final report.")
299
 
300
  st.divider()
@@ -314,7 +298,7 @@ if not st.session_state.final_report and not st.session_state.is_researching:
314
 
315
  col_c1, col_c2, col_c3 = st.columns([1, 2, 1])
316
  with col_c2:
317
- query = st.text_area("Research Topic", height=60, placeholder="e.g. The future of quantum computing in drug discovery...", label_visibility="collapsed", value="The future of quantum computing in drug discovery")
318
 
319
  col_b1, col_b2, col_b3 = st.columns([1, 1, 1])
320
  with col_b2:
@@ -333,14 +317,14 @@ elif st.session_state.is_researching:
333
  """, unsafe_allow_html=True)
334
 
335
  # Trigger async run
336
- asyncio.run(run_research(st.session_state.current_query))
337
 
338
  else:
339
  # Result View - Title removed to let Sticky Header be the main branding,
340
  # and Report itself be the focus.
341
 
342
  # Action Toolbar
343
- col_a1, col_a2, col_a3, col_a4 = st.columns([2, 1, 1, 2])
344
  with col_a2:
345
  pdf_bytes = make_pdf_bytes(st.session_state.final_report)
346
  st.download_button("📄 Download PDF", pdf_bytes, "report.pdf", mime="application/pdf", use_container_width=True)
 
5
  from io import BytesIO
6
  import sys
7
  import os
 
 
 
 
 
8
  from pathlib import Path
 
 
 
 
 
9
  from reportlab.platypus import SimpleDocTemplate, Paragraph
10
  from reportlab.lib.styles import getSampleStyleSheet
11
  from appagents.orchestrator import Orchestrator
12
  from agents import SQLiteSession
13
 
 
 
 
 
 
 
14
 
15
  # --------------------
16
  # Page config
 
66
  }
67
 
68
  .hero-container {
69
+ margin-top: -3rem;
70
  margin-left: -5rem;
71
  margin-right: -5rem;
72
  padding: 2.5rem 1rem 2rem 1rem; /* Compact desktop padding */
 
82
  }
83
 
84
  .hero-container {
85
+ margin-top: -2rem;
86
  margin-left: -1rem;
87
  margin-right: -1rem;
88
  padding: 2rem 1rem 1.5rem 1rem; /* Compact mobile padding */
 
156
  color: #333;
157
  }
158
 
159
+ /* Buttons (including Download Button) */
160
+ .stButton button, .stDownloadButton button {
161
  border-radius: 20px; /* Matching healthcare */
162
  min-height: 48px;
163
  font-weight: 500;
 
229
  # --------------------
230
  # Logic
231
  # --------------------
232
+ async def run_research(query: str, report_format: str = "Academic", research_depth: str = "Standard"):
233
  session_id = st.session_state.session_id
234
  session = SQLiteSession(f"session_{session_id}.db")
235
  orchestrator = Orchestrator(session=session)
 
238
  status_container = st.status("🔍 Researching...", expanded=True)
239
 
240
  try:
241
+ async for chunk in orchestrator.run(query, report_format=report_format, research_depth=research_depth):
242
  # Filtering heuristic: Orchestrator yields status messages then the final report.
243
  # Status messages are short and specific.
244
  if (chunk.startswith("View trace") or
 
270
  st.markdown("""
271
  <div class="hero-container">
272
  <div class="hero-title">🧠 Deep Research</div>
273
+ <div class="hero-subtitle">OpenAI SDK Powered Agentic Research Assistant</div>
274
  </div>
275
  """, unsafe_allow_html=True)
276
 
 
278
  with st.sidebar:
279
  st.header("⚙️ Configuration")
280
  research_depth = st.select_slider("Research Depth", options=["Quick", "Standard", "Deep"], value="Standard")
281
+ report_format = st.selectbox("Report Format", ["Academic", "Business", "Humorous"], index=2)
282
  st.caption("Settings affect the tone and depth of the final report.")
283
 
284
  st.divider()
 
298
 
299
  col_c1, col_c2, col_c3 = st.columns([1, 2, 1])
300
  with col_c2:
301
+ query = st.text_area("Research Topic", height=60, placeholder="e.g. The future of quantum computing in drug discovery...", label_visibility="collapsed", value="Impact of Quantum Computing on Drug Research.")
302
 
303
  col_b1, col_b2, col_b3 = st.columns([1, 1, 1])
304
  with col_b2:
 
317
  """, unsafe_allow_html=True)
318
 
319
  # Trigger async run
320
+ asyncio.run(run_research(st.session_state.current_query, report_format=report_format, research_depth=research_depth))
321
 
322
  else:
323
  # Result View - Title removed to let Sticky Header be the main branding,
324
  # and Report itself be the focus.
325
 
326
  # Action Toolbar
327
+ col_a1, col_a2, col_a3, col_a4 = st.columns([1, 2, 2, 1])
328
  with col_a2:
329
  pdf_bytes = make_pdf_bytes(st.session_state.final_report)
330
  st.download_button("📄 Download PDF", pdf_bytes, "report.pdf", mime="application/pdf", use_container_width=True)
src/deep-research/appagents/email_agent.py CHANGED
@@ -1,11 +1,10 @@
1
  import os
2
  from typing import Dict
3
-
4
  import sendgrid
5
  from sendgrid.helpers.mail import Email, Mail, Content, To
6
  from agents import Agent, function_tool
7
- from core.logger import log_call
8
-
9
 
10
  @function_tool
11
  @log_call
@@ -28,5 +27,5 @@ email_agent = Agent(
28
  name="Email agent",
29
  instructions=INSTRUCTIONS,
30
  tools=[send_email],
31
- model="gpt-4o-mini",
32
- )
 
1
  import os
2
  from typing import Dict
 
3
  import sendgrid
4
  from sendgrid.helpers.mail import Email, Mail, Content, To
5
  from agents import Agent, function_tool
6
+ from common.utility.logger import log_call
7
+ from core.model import get_model_client
8
 
9
  @function_tool
10
  @log_call
 
27
  name="Email agent",
28
  instructions=INSTRUCTIONS,
29
  tools=[send_email],
30
+ model=get_model_client(),
31
+ )
src/deep-research/appagents/guardrail_agent.py CHANGED
@@ -6,8 +6,7 @@ from agents import (
6
  input_guardrail,
7
  GuardrailFunctionOutput,
8
  )
9
- from tools.time_tools import TimeTools
10
- from openai import AsyncOpenAI
11
 
12
 
13
  # ✅ Step 1: Define structured output schema
@@ -26,7 +25,7 @@ guardrail_agent = Agent(
26
  "Otherwise, set it to false."
27
  ),
28
  output_type=UnparliamentaryCheckOutput,
29
- model="gpt-4o-mini",
30
  )
31
 
32
 
@@ -42,4 +41,4 @@ async def guardrail_against_unparliamentary(ctx, agent, message: str):
42
  "found_unparliamentary_word": result.final_output.model_dump()
43
  },
44
  tripwire_triggered=has_unparliamentary_language,
45
- )
 
6
  input_guardrail,
7
  GuardrailFunctionOutput,
8
  )
9
+ from core.model import get_model_client
 
10
 
11
 
12
  # ✅ Step 1: Define structured output schema
 
25
  "Otherwise, set it to false."
26
  ),
27
  output_type=UnparliamentaryCheckOutput,
28
+ model=get_model_client(),
29
  )
30
 
31
 
 
41
  "found_unparliamentary_word": result.final_output.model_dump()
42
  },
43
  tripwire_triggered=has_unparliamentary_language,
44
+ )
src/deep-research/appagents/orchestrator.py CHANGED
@@ -4,7 +4,7 @@ from appagents.planner_agent import planner_agent, WebSearchItem, WebSearchPlan
4
  from appagents.writer_agent import writer_agent, ReportData
5
  from appagents.email_agent import email_agent
6
  from agents.exceptions import InputGuardrailTripwireTriggered
7
- from core.logger import log_call
8
  import asyncio
9
  from langsmith import traceable
10
 
@@ -16,14 +16,19 @@ class Orchestrator:
16
 
17
  @log_call
18
  @traceable(name="Deep Research Run")
19
- async def run(self, query: str):
20
  """ Run the deep research process, yielding the status updates and the final report"""
21
  trace_id = gen_trace_id()
22
  with trace("Deep Research Orchestrator", trace_id=trace_id):
23
  print(f"View trace: https://platform.openai.com/traces/trace?trace_id={trace_id}")
24
  yield f"View trace: https://platform.openai.com/traces/trace?trace_id={trace_id}"
25
  print("Starting research...")
26
- search_plan = await self.plan_searches(query)
 
 
 
 
 
27
 
28
  if not search_plan or not getattr(search_plan, "searches", []):
29
  note = getattr(search_plan, "note", "")
@@ -34,24 +39,24 @@ class Orchestrator:
34
  yield note or "No search results found, ending research."
35
  return
36
 
37
- yield "Searches planned, starting to search..."
38
  search_results = await self.perform_searches(search_plan)
39
  yield "Searches complete, writing report..."
40
- report = await self.write_report(query, search_results)
41
  yield "Report written, sending email..."
42
  # await self.send_email(report)
43
  # yield "Email sent, research complete"
44
  yield report.markdown_report
45
 
46
  @log_call
47
- async def plan_searches(self, query: str) -> WebSearchPlan:
48
  """Plan the searches to perform for the query."""
49
- print("Planning searches...")
50
 
51
  try:
52
  result = await Runner.run(
53
  planner_agent, # use self. unless global
54
- f"Query: {query}",
55
  session=self.session,
56
  )
57
 
@@ -99,10 +104,15 @@ class Orchestrator:
99
  return None
100
 
101
  @log_call
102
- async def write_report(self, query: str, search_results: list[str]) -> ReportData:
103
  """ Write the report for the query """
104
  print("Thinking about report...")
105
- input = f"Original query: {query}\nSummarized search results: {search_results}"
 
 
 
 
 
106
  result = await Runner.run(
107
  writer_agent,
108
  input,
 
4
  from appagents.writer_agent import writer_agent, ReportData
5
  from appagents.email_agent import email_agent
6
  from agents.exceptions import InputGuardrailTripwireTriggered
7
+ from common.utility.logger import log_call
8
  import asyncio
9
  from langsmith import traceable
10
 
 
16
 
17
  @log_call
18
  @traceable(name="Deep Research Run")
19
+ async def run(self, query: str, report_format: str = "Academic", research_depth: str = "Standard"):
20
  """ Run the deep research process, yielding the status updates and the final report"""
21
  trace_id = gen_trace_id()
22
  with trace("Deep Research Orchestrator", trace_id=trace_id):
23
  print(f"View trace: https://platform.openai.com/traces/trace?trace_id={trace_id}")
24
  yield f"View trace: https://platform.openai.com/traces/trace?trace_id={trace_id}"
25
  print("Starting research...")
26
+
27
+ # Map depth to number of searches
28
+ depth_map = {"Quick": 5, "Standard": 10, "Deep": 15}
29
+ num_searches = depth_map.get(research_depth, 10)
30
+
31
+ search_plan = await self.plan_searches(query, num_searches)
32
 
33
  if not search_plan or not getattr(search_plan, "searches", []):
34
  note = getattr(search_plan, "note", "")
 
39
  yield note or "No search results found, ending research."
40
  return
41
 
42
+ yield f"Planned {len(search_plan.searches)} searches (Depth: {research_depth}), starting to search..."
43
  search_results = await self.perform_searches(search_plan)
44
  yield "Searches complete, writing report..."
45
+ report = await self.write_report(query, search_results, report_format, research_depth)
46
  yield "Report written, sending email..."
47
  # await self.send_email(report)
48
  # yield "Email sent, research complete"
49
  yield report.markdown_report
50
 
51
  @log_call
52
+ async def plan_searches(self, query: str, num_searches: int = 10) -> WebSearchPlan:
53
  """Plan the searches to perform for the query."""
54
+ print(f"Planning {num_searches} searches...")
55
 
56
  try:
57
  result = await Runner.run(
58
  planner_agent, # use self. unless global
59
+ f"Query: {query}\nGenerate exactly {num_searches} search terms.",
60
  session=self.session,
61
  )
62
 
 
104
  return None
105
 
106
  @log_call
107
+ async def write_report(self, query: str, search_results: list[str], report_format: str, research_depth: str) -> ReportData:
108
  """ Write the report for the query """
109
  print("Thinking about report...")
110
+ input = (
111
+ f"Original query: {query}\n"
112
+ f"Report Format: {report_format}\n"
113
+ f"Research Depth: {research_depth}\n"
114
+ f"Summarized search results: {search_results}"
115
+ )
116
  result = await Runner.run(
117
  writer_agent,
118
  input,
src/deep-research/appagents/planner_agent.py CHANGED
@@ -1,14 +1,12 @@
1
  import os
2
  from pydantic import BaseModel, Field
3
- from agents import Agent, OpenAIChatCompletionsModel
4
- from openai import AsyncOpenAI
5
  from tools.time_tools import TimeTools
6
  from appagents.guardrail_agent import guardrail_against_unparliamentary
 
7
 
8
- HOW_MANY_SEARCHES = 10
9
-
10
- INSTRUCTIONS = f"You are a helpful research assistant. Given a query, come up with a set of web searches \
11
- to perform to best answer the query. Output {HOW_MANY_SEARCHES} terms to query for. \
12
  Use the tool to find current date & time, and use it where relevant to inform your search and summary."
13
 
14
 
@@ -21,17 +19,7 @@ class WebSearchItem(BaseModel):
21
  class WebSearchPlan(BaseModel):
22
  searches: list[WebSearchItem] = Field(description="A list of web searches to perform to best answer the query.")
23
 
24
- GEMINI_BASE_URL = "https://generativelanguage.googleapis.com/v1beta/openai/"
25
- google_api_key = os.getenv('GOOGLE_API_KEY')
26
- gemini_client = AsyncOpenAI(base_url=GEMINI_BASE_URL, api_key=google_api_key)
27
- gemini_model = OpenAIChatCompletionsModel(model="gemini-2.0-flash", openai_client=gemini_client)
28
-
29
- GROQ_BASE_URL = "https://api.groq.com/openai/v1"
30
- groq_api_key = os.getenv('GROQ_API_KEY')
31
- groq_client = AsyncOpenAI(base_url=GROQ_BASE_URL, api_key=groq_api_key)
32
- groq_model = OpenAIChatCompletionsModel(model="groq/compound", openai_client=groq_client)
33
-
34
- openai_model = "gpt-4.1-mini"
35
 
36
  # Note: Many models do not like tool call and json output_schema used together.
37
 
 
1
  import os
2
  from pydantic import BaseModel, Field
3
+ from agents import Agent
 
4
  from tools.time_tools import TimeTools
5
  from appagents.guardrail_agent import guardrail_against_unparliamentary
6
+ from core.model import get_model_client
7
 
8
+ INSTRUCTIONS = "You are a helpful research assistant. Given a query, come up with a set of web searches \
9
+ to perform to best answer the query. \
 
 
10
  Use the tool to find current date & time, and use it where relevant to inform your search and summary."
11
 
12
 
 
19
  class WebSearchPlan(BaseModel):
20
  searches: list[WebSearchItem] = Field(description="A list of web searches to perform to best answer the query.")
21
 
22
+ openai_model = get_model_client()
 
 
 
 
 
 
 
 
 
 
23
 
24
  # Note: Many models do not like tool call and json output_schema used together.
25
 
src/deep-research/appagents/search_agent.py CHANGED
@@ -1,11 +1,9 @@
1
  import os
2
- from agents import Agent, OpenAIChatCompletionsModel, WebSearchTool
3
- from openai import AsyncOpenAI
4
- from langsmith import wrappers
5
-
6
-
7
- from agents.model_settings import ModelSettings
8
  from tools.google_tools import GoogleTools
 
 
 
9
 
10
  # INSTRUCTIONS = "You are a research assistant. Given a search term, you search the web for that term and \
11
  # produce a concise summary of the results. The summary must 2-3 paragraphs and less than 300 \
@@ -21,29 +19,16 @@ from tools.google_tools import GoogleTools
21
  # Format the entire output as a single, detailed block of text in markdown format, ensuring ALL source links are visible and preserved."
22
 
23
  INSTRUCTIONS = "You are a research assistant. Given a search term, you search the web for that term and \
24
- produce a concise summary of the results. The summary must 3-5 paragraphs and less than 500 \
25
  words. Capture the main points. Write succintly, no need to have complete sentences or good \
26
  grammar. This will be consumed by someone synthesizing a report, so it's vital you capture the \
27
  essence and ignore any fluff. Do not include any additional commentary other than the summary itself."
28
 
29
- GEMINI_BASE_URL = "https://generativelanguage.googleapis.com/v1beta/openai/"
30
- google_api_key = os.getenv('GOOGLE_API_KEY')
31
- gemini_client = AsyncOpenAI(base_url=GEMINI_BASE_URL, api_key=google_api_key)
32
- gemini_client = wrappers.wrap_openai(gemini_client)
33
- gemini_model = OpenAIChatCompletionsModel(model="gemini-2.0-flash", openai_client=gemini_client)
34
-
35
- # search_agent = Agent(
36
- # name="Search agent",
37
- # instructions=INSTRUCTIONS,
38
- # tools=[WebSearchTool(search_context_size="low")],
39
- # # tools=[GoogleTools.search],
40
- # model="gpt-4o-mini",
41
- # model_settings=ModelSettings(tool_choice="required"),
42
- # )
43
 
44
  # -----------------------------
45
  # CONNECT TO MCP SERVER
46
  # -----------------------------
 
47
  async def setup_mcp_tools():
48
  """
49
  Starts the MCP server via stdio and returns its list of tools
@@ -68,23 +53,12 @@ async def setup_mcp_tools():
68
  print(f"✅ Connected to MCP server with {len(mcp_tools)} tool(s).")
69
  return mcp_tools
70
 
71
- # # Note: Gemini does not like
72
- # search_agent = Agent(
73
- # name="Search agent",
74
- # instructions=INSTRUCTIONS,
75
- # # tools=[WebSearchTool(search_context_size="low")],
76
- # tools=[GoogleTools.search],
77
- # model=gemini_model,
78
- # model_settings=ModelSettings(tool_choice="required"),
79
- # )
80
-
81
-
82
  search_agent = Agent(
83
  name="Search agent",
84
  instructions=INSTRUCTIONS,
85
  # tools=[WebSearchTool(search_context_size="low")],
86
  tools=[GoogleTools.search],
87
- model=gemini_model,
88
  model_settings=ModelSettings(tool_choice="required"),
89
  )
90
 
 
1
  import os
2
+ from agents import Agent
 
 
 
 
 
3
  from tools.google_tools import GoogleTools
4
+ from core.model import get_model_client
5
+ from common.utility.logger import log_call
6
+ from agents.model_settings import ModelSettings
7
 
8
  # INSTRUCTIONS = "You are a research assistant. Given a search term, you search the web for that term and \
9
  # produce a concise summary of the results. The summary must 2-3 paragraphs and less than 300 \
 
19
  # Format the entire output as a single, detailed block of text in markdown format, ensuring ALL source links are visible and preserved."
20
 
21
  INSTRUCTIONS = "You are a research assistant. Given a search term, you search the web for that term and \
22
+ produce a concise summary of the results. The summary must 5-6 paragraphs and less than 500 \
23
  words. Capture the main points. Write succintly, no need to have complete sentences or good \
24
  grammar. This will be consumed by someone synthesizing a report, so it's vital you capture the \
25
  essence and ignore any fluff. Do not include any additional commentary other than the summary itself."
26
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
27
 
28
  # -----------------------------
29
  # CONNECT TO MCP SERVER
30
  # -----------------------------
31
+ @log_call
32
  async def setup_mcp_tools():
33
  """
34
  Starts the MCP server via stdio and returns its list of tools
 
53
  print(f"✅ Connected to MCP server with {len(mcp_tools)} tool(s).")
54
  return mcp_tools
55
 
 
 
 
 
 
 
 
 
 
 
 
56
  search_agent = Agent(
57
  name="Search agent",
58
  instructions=INSTRUCTIONS,
59
  # tools=[WebSearchTool(search_context_size="low")],
60
  tools=[GoogleTools.search],
61
+ model=get_model_client(),
62
  model_settings=ModelSettings(tool_choice="required"),
63
  )
64
 
src/deep-research/appagents/writer_agent.py CHANGED
@@ -1,15 +1,17 @@
1
  import os
2
  from pydantic import BaseModel, Field
3
- from agents import Agent, OpenAIChatCompletionsModel, WebSearchTool
4
- from openai import AsyncOpenAI
5
 
6
  INSTRUCTIONS = (
7
  "You are a senior researcher tasked with writing a cohesive report for a research query. "
8
- "You will be provided with the original query, and some initial research done by a research assistant.\n"
 
9
  "You should first come up with an outline for the report that describes the structure and "
10
  "flow of the report. Then, generate the report and return that as your final output.\n"
11
- "The final output should be in markdown format, and it should be lengthy and detailed. Aim "
12
- "for 5-10 pages of content, at least 1000 words."
 
13
  )
14
 
15
 
@@ -20,22 +22,9 @@ class ReportData(BaseModel):
20
 
21
  follow_up_questions: list[str] = Field(description="Suggested topics to research further")
22
 
23
- GEMINI_BASE_URL = "https://generativelanguage.googleapis.com/v1beta/openai/"
24
- google_api_key = os.getenv('GOOGLE_API_KEY')
25
- gemini_client = AsyncOpenAI(base_url=GEMINI_BASE_URL, api_key=google_api_key)
26
- gemini_model = OpenAIChatCompletionsModel(model="gemini-2.0-flash", openai_client=gemini_client)
27
-
28
-
29
- # writer_agent = Agent(
30
- # name="WriterAgent",
31
- # instructions=INSTRUCTIONS,
32
- # model="gpt-5-mini",
33
- # output_type=ReportData,
34
- # )
35
-
36
  writer_agent = Agent(
37
  name="WriterAgent",
38
  instructions=INSTRUCTIONS,
39
- model=gemini_model,
40
  output_type=ReportData,
41
  )
 
1
  import os
2
  from pydantic import BaseModel, Field
3
+ from agents import Agent
4
+ from core.model import get_model_client
5
 
6
  INSTRUCTIONS = (
7
  "You are a senior researcher tasked with writing a cohesive report for a research query. "
8
+ "You will be provided with the original query, some initial research done by a research assistant, "
9
+ "and a requested Report Format and Research Depth.\n"
10
  "You should first come up with an outline for the report that describes the structure and "
11
  "flow of the report. Then, generate the report and return that as your final output.\n"
12
+ "The final output should be in markdown format. "
13
+ "Adjust the tone, structure, and length based on the requested Report Format and Research Depth. "
14
+ "Make the output colorful and add minimal emojis to make the content appealing and aesthetic."
15
  )
16
 
17
 
 
22
 
23
  follow_up_questions: list[str] = Field(description="Suggested topics to research further")
24
 
 
 
 
 
 
 
 
 
 
 
 
 
 
25
  writer_agent = Agent(
26
  name="WriterAgent",
27
  instructions=INSTRUCTIONS,
28
+ model=get_model_client(),
29
  output_type=ReportData,
30
  )
src/deep-research/core/__init__.py CHANGED
@@ -0,0 +1,4 @@
 
 
 
 
 
1
+
2
+ from .model import get_model_client
3
+
4
+ __all__ = ["get_model_client"]
src/deep-research/core/model.py ADDED
@@ -0,0 +1,36 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from common.utility.openai_model_factory import OpenAIModelFactory
2
+
3
+ def get_model_client(provider:str = "openai"):
4
+ if provider.lower() == "google":
5
+ return OpenAIModelFactory.get_model(
6
+ provider="google",
7
+ model_name="gemini-2.5-flash",
8
+ temperature=0
9
+ )
10
+ elif provider.lower() == "openai":
11
+ return OpenAIModelFactory.get_model(
12
+ provider="openai",
13
+ model_name="gpt-4o-mini",
14
+ temperature=0
15
+ )
16
+ elif provider.lower() == "azure":
17
+ return OpenAIModelFactory.get_model(
18
+ provider="azure",
19
+ model_name="gpt-4o-mini",
20
+ temperature=0
21
+ )
22
+ elif provider.lower() == "groq":
23
+ return OpenAIModelFactory.get_model(
24
+ provider="groq",
25
+ model_name="gpt-4o-mini",
26
+ temperature=0
27
+ )
28
+ elif provider.lower() == "ollama":
29
+ return OpenAIModelFactory.get_model(
30
+ provider="ollama",
31
+ model_name="gpt-4o-mini",
32
+ temperature=0
33
+ )
34
+ else:
35
+ raise ValueError(f"Unsupported provider: {provider}")
36
+
src/deep-research/tools/google_tools.py CHANGED
@@ -1,11 +1,7 @@
1
  import os
2
  import requests
3
- from dotenv import load_dotenv
4
  from agents import function_tool
5
- from core.logger import log_call
6
-
7
- # Load environment variables once
8
- load_dotenv()
9
 
10
 
11
  # ============================================================
 
1
  import os
2
  import requests
 
3
  from agents import function_tool
4
+ from common.utility.logger import log_call
 
 
 
5
 
6
 
7
  # ============================================================
src/deep-research/tools/time_tools.py CHANGED
@@ -1,6 +1,8 @@
1
  from datetime import datetime
2
  from agents import function_tool
3
- from core.logger import log_call
 
 
4
 
5
  class TimeTools:
6
  """Provides tools related to current date and time."""
 
1
  from datetime import datetime
2
  from agents import function_tool
3
+ from common.utility.logger import log_call
4
+
5
+
6
 
7
  class TimeTools:
8
  """Provides tools related to current date and time."""
uv.lock CHANGED
@@ -13,6 +13,7 @@ dependencies = [
13
  { name = "asyncio" },
14
  { name = "autogen-agentchat" },
15
  { name = "autogen-ext", extra = ["grpc", "mcp", "ollama", "openai"] },
 
16
  { name = "beautifulsoup4" },
17
  { name = "chromadb" },
18
  { name = "datasets" },
@@ -35,7 +36,6 @@ dependencies = [
35
  { name = "langchain-ollama" },
36
  { name = "langchain-openai" },
37
  { name = "langchain-text-splitters" },
38
- { name = "langfuse" },
39
  { name = "langgraph" },
40
  { name = "langgraph-checkpoint-sqlite" },
41
  { name = "langsmith" },
@@ -46,6 +46,11 @@ dependencies = [
46
  { name = "openai" },
47
  { name = "openai-agents" },
48
  { name = "openai-whisper" },
 
 
 
 
 
49
  { name = "playwright" },
50
  { name = "plotly" },
51
  { name = "polygon-api-client" },
@@ -63,6 +68,7 @@ dependencies = [
63
  { name = "speedtest-cli" },
64
  { name = "streamlit" },
65
  { name = "textblob" },
 
66
  { name = "wikipedia" },
67
  { name = "yfinance" },
68
  { name = "yt-dlp" },
@@ -83,6 +89,7 @@ requires-dist = [
83
  { name = "asyncio" },
84
  { name = "autogen-agentchat", specifier = ">=0.7.5" },
85
  { name = "autogen-ext", extras = ["grpc", "mcp", "ollama", "openai"], specifier = ">=0.7.5" },
 
86
  { name = "beautifulsoup4", specifier = ">=4.12.3" },
87
  { name = "chromadb", specifier = "==1.3.5" },
88
  { name = "datasets", specifier = ">=4.4.1" },
@@ -105,7 +112,6 @@ requires-dist = [
105
  { name = "langchain-ollama", specifier = ">=1.0.0" },
106
  { name = "langchain-openai", specifier = ">=1.0.3" },
107
  { name = "langchain-text-splitters", specifier = ">=1.0.0" },
108
- { name = "langfuse", specifier = ">=2.0.0" },
109
  { name = "langgraph", specifier = ">=1.0.3" },
110
  { name = "langgraph-checkpoint-sqlite", specifier = ">=3.0.0" },
111
  { name = "langsmith", specifier = ">=0.4.43" },
@@ -116,6 +122,11 @@ requires-dist = [
116
  { name = "openai", specifier = ">=2.8.1" },
117
  { name = "openai-agents", specifier = ">=0.5.1" },
118
  { name = "openai-whisper", specifier = ">=1.0.0" },
 
 
 
 
 
119
  { name = "playwright", specifier = ">=1.51.0" },
120
  { name = "plotly", specifier = ">=6.5.0" },
121
  { name = "polygon-api-client", specifier = ">=1.16.3" },
@@ -133,6 +144,7 @@ requires-dist = [
133
  { name = "speedtest-cli", specifier = ">=2.1.3" },
134
  { name = "streamlit", specifier = ">=1.51.0" },
135
  { name = "textblob", specifier = ">=0.17.1" },
 
136
  { name = "wikipedia", specifier = ">=1.4.0" },
137
  { name = "yfinance", specifier = ">=0.2.66" },
138
  { name = "yt-dlp", specifier = ">=2025.11.12" },
@@ -405,6 +417,35 @@ openai = [
405
  { name = "tiktoken" },
406
  ]
407
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
408
  [[package]]
409
  name = "backoff"
410
  version = "2.2.1"
@@ -713,6 +754,12 @@ wheels = [
713
  { url = "https://files.pythonhosted.org/packages/e8/cb/2da4cc83f5edb9c3257d09e1e7ab7b23f049c7962cae8d842bbef0a9cec9/cryptography-46.0.3-cp38-abi3-win_arm64.whl", hash = "sha256:d89c3468de4cdc4f08a57e214384d0471911a3830fcdaf7a8cc587e42a866372", size = 2918740, upload-time = "2025-10-15T23:18:12.277Z" },
714
  ]
715
 
 
 
 
 
 
 
716
  [[package]]
717
  name = "curl-cffi"
718
  version = "0.13.0"
@@ -810,6 +857,18 @@ wheels = [
810
  { url = "https://files.pythonhosted.org/packages/4e/8c/f3147f5c4b73e7550fe5f9352eaa956ae838d5c51eb58e7a25b9f3e2643b/decorator-5.2.1-py3-none-any.whl", hash = "sha256:d316bb415a2d9e2d2b3abcc4084c6502fc09240e292cd76a76afc106a1c8e04a", size = 9190, upload-time = "2025-02-24T04:41:32.565Z" },
811
  ]
812
 
 
 
 
 
 
 
 
 
 
 
 
 
813
  [[package]]
814
  name = "dill"
815
  version = "0.4.0"
@@ -1377,6 +1436,15 @@ wheels = [
1377
  { url = "https://files.pythonhosted.org/packages/a4/ed/1f1afb2e9e7f38a545d628f864d562a5ae64fe6f7a10e28ffb9b185b4e89/importlib_resources-6.5.2-py3-none-any.whl", hash = "sha256:789cfdc3ed28c78b67a06acb8126751ced69a3d5f79c095a98298cd8a760ccec", size = 37461, upload-time = "2025-01-03T18:51:54.306Z" },
1378
  ]
1379
 
 
 
 
 
 
 
 
 
 
1380
  [[package]]
1381
  name = "iniconfig"
1382
  version = "2.3.0"
@@ -1791,27 +1859,6 @@ wheels = [
1791
  { url = "https://files.pythonhosted.org/packages/1e/97/d362353ab04f865af6f81d4d46e7aa428734aa032de0017934b771fc34b7/langchain_text_splitters-1.0.0-py3-none-any.whl", hash = "sha256:f00c8219d3468f2c5bd951b708b6a7dd9bc3c62d0cfb83124c377f7170f33b2e", size = 33851, upload-time = "2025-10-17T14:33:40.46Z" },
1792
  ]
1793
 
1794
- [[package]]
1795
- name = "langfuse"
1796
- version = "3.10.6"
1797
- source = { registry = "https://pypi.org/simple" }
1798
- dependencies = [
1799
- { name = "backoff" },
1800
- { name = "httpx" },
1801
- { name = "openai" },
1802
- { name = "opentelemetry-api" },
1803
- { name = "opentelemetry-exporter-otlp-proto-http" },
1804
- { name = "opentelemetry-sdk" },
1805
- { name = "packaging" },
1806
- { name = "pydantic" },
1807
- { name = "requests" },
1808
- { name = "wrapt" },
1809
- ]
1810
- sdist = { url = "https://files.pythonhosted.org/packages/e6/70/4ff19dd1085bb4d5007f008a696c8cf989a0ad76eabc512a5cd19ee4a0b7/langfuse-3.10.6.tar.gz", hash = "sha256:fced9ca0416ba7499afa45fbedf831afc0ec824cb283719b9cf429bf5713f205", size = 223656, upload-time = "2025-12-12T13:29:24.048Z" }
1811
- wheels = [
1812
- { url = "https://files.pythonhosted.org/packages/ce/f0/fac7d56ce1136afbbebaddd1dc119fb1b94b5a7489944d0b4c2dcee99ed7/langfuse-3.10.6-py3-none-any.whl", hash = "sha256:36ca490cd64e372b1b94c28063b3fea39b1a8446cabd20172b524d01011a34e1", size = 399347, upload-time = "2025-12-12T13:29:22.462Z" },
1813
- ]
1814
-
1815
  [[package]]
1816
  name = "langgraph"
1817
  version = "1.0.3"
@@ -2124,6 +2171,32 @@ wheels = [
2124
  { url = "https://files.pythonhosted.org/packages/43/e3/7d92a15f894aa0c9c4b49b8ee9ac9850d6e63b03c9c32c0367a13ae62209/mpmath-1.3.0-py3-none-any.whl", hash = "sha256:a0b2b9fe80bbcd81a6647ff13108738cfb482d481d826cc0e02f5b35e5c88d2c", size = 536198, upload-time = "2023-03-07T16:47:09.197Z" },
2125
  ]
2126
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
2127
  [[package]]
2128
  name = "multidict"
2129
  version = "6.7.0"
@@ -2488,6 +2561,64 @@ dependencies = [
2488
  ]
2489
  sdist = { url = "https://files.pythonhosted.org/packages/35/8e/d36f8880bcf18ec026a55807d02fe4c7357da9f25aebd92f85178000c0dc/openai_whisper-20250625.tar.gz", hash = "sha256:37a91a3921809d9f44748ffc73c0a55c9f366c85a3ef5c2ae0cc09540432eb96", size = 803191, upload-time = "2025-06-26T01:06:13.34Z" }
2490
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
2491
  [[package]]
2492
  name = "opentelemetry-api"
2493
  version = "1.38.0"
@@ -2501,6 +2632,19 @@ wheels = [
2501
  { url = "https://files.pythonhosted.org/packages/ae/a2/d86e01c28300bd41bab8f18afd613676e2bd63515417b77636fc1add426f/opentelemetry_api-1.38.0-py3-none-any.whl", hash = "sha256:2891b0197f47124454ab9f0cf58f3be33faca394457ac3e09daba13ff50aa582", size = 65947, upload-time = "2025-10-16T08:35:30.23Z" },
2502
  ]
2503
 
 
 
 
 
 
 
 
 
 
 
 
 
 
2504
  [[package]]
2505
  name = "opentelemetry-exporter-otlp-proto-common"
2506
  version = "1.38.0"
@@ -2565,159 +2709,719 @@ wheels = [
2565
  ]
2566
 
2567
  [[package]]
2568
- name = "opentelemetry-proto"
2569
- version = "1.38.0"
2570
  source = { registry = "https://pypi.org/simple" }
2571
  dependencies = [
2572
- { name = "protobuf" },
 
 
 
2573
  ]
2574
- sdist = { url = "https://files.pythonhosted.org/packages/51/14/f0c4f0f6371b9cb7f9fa9ee8918bfd59ac7040c7791f1e6da32a1839780d/opentelemetry_proto-1.38.0.tar.gz", hash = "sha256:88b161e89d9d372ce723da289b7da74c3a8354a8e5359992be813942969ed468", size = 46152, upload-time = "2025-10-16T08:36:01.612Z" }
2575
  wheels = [
2576
- { url = "https://files.pythonhosted.org/packages/b6/6a/82b68b14efca5150b2632f3692d627afa76b77378c4999f2648979409528/opentelemetry_proto-1.38.0-py3-none-any.whl", hash = "sha256:b6ebe54d3217c42e45462e2a1ae28c3e2bf2ec5a5645236a490f55f45f1a0a18", size = 72535, upload-time = "2025-10-16T08:35:45.749Z" },
2577
  ]
2578
 
2579
  [[package]]
2580
- name = "opentelemetry-sdk"
2581
- version = "1.38.0"
2582
  source = { registry = "https://pypi.org/simple" }
2583
  dependencies = [
2584
  { name = "opentelemetry-api" },
 
2585
  { name = "opentelemetry-semantic-conventions" },
2586
- { name = "typing-extensions" },
2587
  ]
2588
- sdist = { url = "https://files.pythonhosted.org/packages/85/cb/f0eee1445161faf4c9af3ba7b848cc22a50a3d3e2515051ad8628c35ff80/opentelemetry_sdk-1.38.0.tar.gz", hash = "sha256:93df5d4d871ed09cb4272305be4d996236eedb232253e3ab864c8620f051cebe", size = 171942, upload-time = "2025-10-16T08:36:02.257Z" }
2589
  wheels = [
2590
- { url = "https://files.pythonhosted.org/packages/2f/2e/e93777a95d7d9c40d270a371392b6d6f1ff170c2a3cb32d6176741b5b723/opentelemetry_sdk-1.38.0-py3-none-any.whl", hash = "sha256:1c66af6564ecc1553d72d811a01df063ff097cdc82ce188da9951f93b8d10f6b", size = 132349, upload-time = "2025-10-16T08:35:46.995Z" },
2591
  ]
2592
 
2593
  [[package]]
2594
- name = "opentelemetry-semantic-conventions"
2595
- version = "0.59b0"
2596
  source = { registry = "https://pypi.org/simple" }
2597
  dependencies = [
2598
  { name = "opentelemetry-api" },
2599
- { name = "typing-extensions" },
 
 
2600
  ]
2601
- sdist = { url = "https://files.pythonhosted.org/packages/40/bc/8b9ad3802cd8ac6583a4eb7de7e5d7db004e89cb7efe7008f9c8a537ee75/opentelemetry_semantic_conventions-0.59b0.tar.gz", hash = "sha256:7a6db3f30d70202d5bf9fa4b69bc866ca6a30437287de6c510fb594878aed6b0", size = 129861, upload-time = "2025-10-16T08:36:03.346Z" }
2602
  wheels = [
2603
- { url = "https://files.pythonhosted.org/packages/24/7d/c88d7b15ba8fe5c6b8f93be50fc11795e9fc05386c44afaf6b76fe191f9b/opentelemetry_semantic_conventions-0.59b0-py3-none-any.whl", hash = "sha256:35d3b8833ef97d614136e253c1da9342b4c3c083bbaf29ce31d572a1c3825eed", size = 207954, upload-time = "2025-10-16T08:35:48.054Z" },
2604
  ]
2605
 
2606
  [[package]]
2607
- name = "orjson"
2608
- version = "3.11.4"
2609
  source = { registry = "https://pypi.org/simple" }
2610
- sdist = { url = "https://files.pythonhosted.org/packages/c6/fe/ed708782d6709cc60eb4c2d8a361a440661f74134675c72990f2c48c785f/orjson-3.11.4.tar.gz", hash = "sha256:39485f4ab4c9b30a3943cfe99e1a213c4776fb69e8abd68f66b83d5a0b0fdc6d", size = 5945188, upload-time = "2025-10-24T15:50:38.027Z" }
 
 
 
 
 
 
 
 
2611
  wheels = [
2612
- { url = "https://files.pythonhosted.org/packages/63/51/6b556192a04595b93e277a9ff71cd0cc06c21a7df98bcce5963fa0f5e36f/orjson-3.11.4-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:d4371de39319d05d3f482f372720b841c841b52f5385bd99c61ed69d55d9ab50", size = 243571, upload-time = "2025-10-24T15:49:10.008Z" },
2613
- { url = "https://files.pythonhosted.org/packages/1c/2c/2602392ddf2601d538ff11848b98621cd465d1a1ceb9db9e8043181f2f7b/orjson-3.11.4-cp312-cp312-macosx_15_0_arm64.whl", hash = "sha256:e41fd3b3cac850eaae78232f37325ed7d7436e11c471246b87b2cd294ec94853", size = 128891, upload-time = "2025-10-24T15:49:11.297Z" },
2614
- { url = "https://files.pythonhosted.org/packages/4e/47/bf85dcf95f7a3a12bf223394a4f849430acd82633848d52def09fa3f46ad/orjson-3.11.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:600e0e9ca042878c7fdf189cf1b028fe2c1418cc9195f6cb9824eb6ed99cb938", size = 130137, upload-time = "2025-10-24T15:49:12.544Z" },
2615
- { url = "https://files.pythonhosted.org/packages/b4/4d/a0cb31007f3ab6f1fd2a1b17057c7c349bc2baf8921a85c0180cc7be8011/orjson-3.11.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7bbf9b333f1568ef5da42bc96e18bf30fd7f8d54e9ae066d711056add508e415", size = 129152, upload-time = "2025-10-24T15:49:13.754Z" },
2616
- { url = "https://files.pythonhosted.org/packages/f7/ef/2811def7ce3d8576b19e3929fff8f8f0d44bc5eb2e0fdecb2e6e6cc6c720/orjson-3.11.4-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4806363144bb6e7297b8e95870e78d30a649fdc4e23fc84daa80c8ebd366ce44", size = 136834, upload-time = "2025-10-24T15:49:15.307Z" },
2617
- { url = "https://files.pythonhosted.org/packages/00/d4/9aee9e54f1809cec8ed5abd9bc31e8a9631d19460e3b8470145d25140106/orjson-3.11.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad355e8308493f527d41154e9053b86a5be892b3b359a5c6d5d95cda23601cb2", size = 137519, upload-time = "2025-10-24T15:49:16.557Z" },
2618
- { url = "https://files.pythonhosted.org/packages/db/ea/67bfdb5465d5679e8ae8d68c11753aaf4f47e3e7264bad66dc2f2249e643/orjson-3.11.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c8a7517482667fb9f0ff1b2f16fe5829296ed7a655d04d68cd9711a4d8a4e708", size = 136749, upload-time = "2025-10-24T15:49:17.796Z" },
2619
- { url = "https://files.pythonhosted.org/packages/01/7e/62517dddcfce6d53a39543cd74d0dccfcbdf53967017c58af68822100272/orjson-3.11.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97eb5942c7395a171cbfecc4ef6701fc3c403e762194683772df4c54cfbb2210", size = 136325, upload-time = "2025-10-24T15:49:19.347Z" },
2620
- { url = "https://files.pythonhosted.org/packages/18/ae/40516739f99ab4c7ec3aaa5cc242d341fcb03a45d89edeeaabc5f69cb2cf/orjson-3.11.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:149d95d5e018bdd822e3f38c103b1a7c91f88d38a88aada5c4e9b3a73a244241", size = 140204, upload-time = "2025-10-24T15:49:20.545Z" },
2621
- { url = "https://files.pythonhosted.org/packages/82/18/ff5734365623a8916e3a4037fcef1cd1782bfc14cf0992afe7940c5320bf/orjson-3.11.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:624f3951181eb46fc47dea3d221554e98784c823e7069edb5dbd0dc826ac909b", size = 406242, upload-time = "2025-10-24T15:49:21.884Z" },
2622
- { url = "https://files.pythonhosted.org/packages/e1/43/96436041f0a0c8c8deca6a05ebeaf529bf1de04839f93ac5e7c479807aec/orjson-3.11.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:03bfa548cf35e3f8b3a96c4e8e41f753c686ff3d8e182ce275b1751deddab58c", size = 150013, upload-time = "2025-10-24T15:49:23.185Z" },
2623
- { url = "https://files.pythonhosted.org/packages/1b/48/78302d98423ed8780479a1e682b9aecb869e8404545d999d34fa486e573e/orjson-3.11.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:525021896afef44a68148f6ed8a8bf8375553d6066c7f48537657f64823565b9", size = 139951, upload-time = "2025-10-24T15:49:24.428Z" },
2624
- { url = "https://files.pythonhosted.org/packages/4a/7b/ad613fdcdaa812f075ec0875143c3d37f8654457d2af17703905425981bf/orjson-3.11.4-cp312-cp312-win32.whl", hash = "sha256:b58430396687ce0f7d9eeb3dd47761ca7d8fda8e9eb92b3077a7a353a75efefa", size = 136049, upload-time = "2025-10-24T15:49:25.973Z" },
2625
- { url = "https://files.pythonhosted.org/packages/b9/3c/9cf47c3ff5f39b8350fb21ba65d789b6a1129d4cbb3033ba36c8a9023520/orjson-3.11.4-cp312-cp312-win_amd64.whl", hash = "sha256:c6dbf422894e1e3c80a177133c0dda260f81428f9de16d61041949f6a2e5c140", size = 131461, upload-time = "2025-10-24T15:49:27.259Z" },
2626
- { url = "https://files.pythonhosted.org/packages/c6/3b/e2425f61e5825dc5b08c2a5a2b3af387eaaca22a12b9c8c01504f8614c36/orjson-3.11.4-cp312-cp312-win_arm64.whl", hash = "sha256:d38d2bc06d6415852224fcc9c0bfa834c25431e466dc319f0edd56cca81aa96e", size = 126167, upload-time = "2025-10-24T15:49:28.511Z" },
2627
  ]
2628
 
2629
  [[package]]
2630
- name = "ormsgpack"
2631
- version = "1.12.0"
2632
  source = { registry = "https://pypi.org/simple" }
2633
- sdist = { url = "https://files.pythonhosted.org/packages/6c/67/d5ef41c3b4a94400be801984ef7c7fc9623e1a82b643e74eeec367e7462b/ormsgpack-1.12.0.tar.gz", hash = "sha256:94be818fdbb0285945839b88763b269987787cb2f7ef280cad5d6ec815b7e608", size = 49959, upload-time = "2025-11-04T18:30:10.083Z" }
 
 
 
 
 
 
2634
  wheels = [
2635
- { url = "https://files.pythonhosted.org/packages/a2/f2/c1036b2775fcc0cfa5fd618c53bcd3b862ee07298fb627f03af4c7982f84/ormsgpack-1.12.0-cp312-cp312-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:e0c1e08b64d99076fee155276097489b82cc56e8d5951c03c721a65a32f44494", size = 369538, upload-time = "2025-11-04T18:29:37.125Z" },
2636
- { url = "https://files.pythonhosted.org/packages/d9/ca/526c4ae02f3cb34621af91bf8282a10d666757c2e0c6ff391ff5d403d607/ormsgpack-1.12.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3fd43bcb299131690b8e0677af172020b2ada8e625169034b42ac0c13adf84aa", size = 195872, upload-time = "2025-11-04T18:29:38.34Z" },
2637
- { url = "https://files.pythonhosted.org/packages/7f/0f/83bb7968e9715f6a85be53d041b1e6324a05428f56b8b980dac866886871/ormsgpack-1.12.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f0149d595341e22ead340bf281b2995c4cc7dc8d522a6b5f575fe17aa407604", size = 206469, upload-time = "2025-11-04T18:29:39.749Z" },
2638
- { url = "https://files.pythonhosted.org/packages/02/e3/9e93ca1065f2d4af035804a842b1ff3025bab580c7918239bb225cd1fee2/ormsgpack-1.12.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f19a1b27d169deb553c80fd10b589fc2be1fc14cee779fae79fcaf40db04de2b", size = 208273, upload-time = "2025-11-04T18:29:40.769Z" },
2639
- { url = "https://files.pythonhosted.org/packages/b3/d8/6d6ef901b3a8b8f3ab8836b135a56eb7f66c559003e251d9530bedb12627/ormsgpack-1.12.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6f28896942d655064940dfe06118b7ce1e3468d051483148bf02c99ec157483a", size = 377839, upload-time = "2025-11-04T18:29:42.092Z" },
2640
- { url = "https://files.pythonhosted.org/packages/4c/72/fcb704bfa4c2c3a37b647d597cc45a13cffc9d50baac635a9ad620731d29/ormsgpack-1.12.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:9396efcfa48b4abbc06e44c5dbc3c4574a8381a80cb4cd01eea15d28b38c554e", size = 471446, upload-time = "2025-11-04T18:29:43.133Z" },
2641
- { url = "https://files.pythonhosted.org/packages/84/f8/402e4e3eb997c2ee534c99bec4b5bb359c2a1f9edadf043e254a71e11378/ormsgpack-1.12.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:96586ed537a5fb386a162c4f9f7d8e6f76e07b38a990d50c73f11131e00ff040", size = 381783, upload-time = "2025-11-04T18:29:44.466Z" },
2642
- { url = "https://files.pythonhosted.org/packages/f0/8d/5897b700360bc00911b70ae5ef1134ee7abf5baa81a92a4be005917d3dfd/ormsgpack-1.12.0-cp312-cp312-win_amd64.whl", hash = "sha256:e70387112fb3870e4844de090014212cdcf1342f5022047aecca01ec7de05d7a", size = 112943, upload-time = "2025-11-04T18:29:45.468Z" },
2643
- { url = "https://files.pythonhosted.org/packages/5b/44/1e73649f79bb96d6cf9e5bcbac68b6216d238bba80af351c4c0cbcf7ee15/ormsgpack-1.12.0-cp312-cp312-win_arm64.whl", hash = "sha256:d71290a23de5d4829610c42665d816c661ecad8979883f3f06b2e3ab9639962e", size = 106688, upload-time = "2025-11-04T18:29:46.411Z" },
2644
  ]
2645
 
2646
  [[package]]
2647
- name = "overrides"
2648
- version = "7.7.0"
2649
  source = { registry = "https://pypi.org/simple" }
2650
- sdist = { url = "https://files.pythonhosted.org/packages/36/86/b585f53236dec60aba864e050778b25045f857e17f6e5ea0ae95fe80edd2/overrides-7.7.0.tar.gz", hash = "sha256:55158fa3d93b98cc75299b1e67078ad9003ca27945c76162c1c0766d6f91820a", size = 22812, upload-time = "2024-01-27T21:01:33.423Z" }
 
 
 
 
 
 
2651
  wheels = [
2652
- { url = "https://files.pythonhosted.org/packages/2c/ab/fc8290c6a4c722e5514d80f62b2dc4c4df1a68a41d1364e625c35990fcf3/overrides-7.7.0-py3-none-any.whl", hash = "sha256:c7ed9d062f78b8e4c1a7b70bd8796b35ead4d9f510227ef9c5dc7626c60d7e49", size = 17832, upload-time = "2024-01-27T21:01:31.393Z" },
2653
  ]
2654
 
2655
  [[package]]
2656
- name = "packaging"
2657
- version = "25.0"
2658
  source = { registry = "https://pypi.org/simple" }
2659
- sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727, upload-time = "2025-04-19T11:48:59.673Z" }
 
 
 
 
 
 
2660
  wheels = [
2661
- { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" },
2662
  ]
2663
 
2664
  [[package]]
2665
- name = "pandas"
2666
- version = "2.3.3"
2667
  source = { registry = "https://pypi.org/simple" }
2668
  dependencies = [
2669
- { name = "numpy" },
2670
- { name = "python-dateutil" },
2671
- { name = "pytz" },
2672
- { name = "tzdata" },
2673
  ]
2674
- sdist = { url = "https://files.pythonhosted.org/packages/33/01/d40b85317f86cf08d853a4f495195c73815fdf205eef3993821720274518/pandas-2.3.3.tar.gz", hash = "sha256:e05e1af93b977f7eafa636d043f9f94c7ee3ac81af99c13508215942e64c993b", size = 4495223, upload-time = "2025-09-29T23:34:51.853Z" }
2675
  wheels = [
2676
- { url = "https://files.pythonhosted.org/packages/9c/fb/231d89e8637c808b997d172b18e9d4a4bc7bf31296196c260526055d1ea0/pandas-2.3.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d21f6d74eb1725c2efaa71a2bfc661a0689579b58e9c0ca58a739ff0b002b53", size = 11597846, upload-time = "2025-09-29T23:19:48.856Z" },
2677
- { url = "https://files.pythonhosted.org/packages/5c/bd/bf8064d9cfa214294356c2d6702b716d3cf3bb24be59287a6a21e24cae6b/pandas-2.3.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3fd2f887589c7aa868e02632612ba39acb0b8948faf5cc58f0850e165bd46f35", size = 10729618, upload-time = "2025-09-29T23:39:08.659Z" },
2678
- { url = "https://files.pythonhosted.org/packages/57/56/cf2dbe1a3f5271370669475ead12ce77c61726ffd19a35546e31aa8edf4e/pandas-2.3.3-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ecaf1e12bdc03c86ad4a7ea848d66c685cb6851d807a26aa245ca3d2017a1908", size = 11737212, upload-time = "2025-09-29T23:19:59.765Z" },
2679
- { url = "https://files.pythonhosted.org/packages/e5/63/cd7d615331b328e287d8233ba9fdf191a9c2d11b6af0c7a59cfcec23de68/pandas-2.3.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b3d11d2fda7eb164ef27ffc14b4fcab16a80e1ce67e9f57e19ec0afaf715ba89", size = 12362693, upload-time = "2025-09-29T23:20:14.098Z" },
2680
- { url = "https://files.pythonhosted.org/packages/a6/de/8b1895b107277d52f2b42d3a6806e69cfef0d5cf1d0ba343470b9d8e0a04/pandas-2.3.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a68e15f780eddf2b07d242e17a04aa187a7ee12b40b930bfdd78070556550e98", size = 12771002, upload-time = "2025-09-29T23:20:26.76Z" },
2681
- { url = "https://files.pythonhosted.org/packages/87/21/84072af3187a677c5893b170ba2c8fbe450a6ff911234916da889b698220/pandas-2.3.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:371a4ab48e950033bcf52b6527eccb564f52dc826c02afd9a1bc0ab731bba084", size = 13450971, upload-time = "2025-09-29T23:20:41.344Z" },
2682
- { url = "https://files.pythonhosted.org/packages/86/41/585a168330ff063014880a80d744219dbf1dd7a1c706e75ab3425a987384/pandas-2.3.3-cp312-cp312-win_amd64.whl", hash = "sha256:a16dcec078a01eeef8ee61bf64074b4e524a2a3f4b3be9326420cabe59c4778b", size = 10992722, upload-time = "2025-09-29T23:20:54.139Z" },
2683
  ]
2684
 
2685
  [[package]]
2686
- name = "parso"
2687
- version = "0.8.5"
2688
  source = { registry = "https://pypi.org/simple" }
2689
- sdist = { url = "https://files.pythonhosted.org/packages/d4/de/53e0bcf53d13e005bd8c92e7855142494f41171b34c2536b86187474184d/parso-0.8.5.tar.gz", hash = "sha256:034d7354a9a018bdce352f48b2a8a450f05e9d6ee85db84764e9b6bd96dafe5a", size = 401205, upload-time = "2025-08-23T15:15:28.028Z" }
 
 
 
 
 
 
2690
  wheels = [
2691
- { url = "https://files.pythonhosted.org/packages/16/32/f8e3c85d1d5250232a5d3477a2a28cc291968ff175caeadaf3cc19ce0e4a/parso-0.8.5-py2.py3-none-any.whl", hash = "sha256:646204b5ee239c396d040b90f9e272e9a8017c630092bf59980beb62fd033887", size = 106668, upload-time = "2025-08-23T15:15:25.663Z" },
2692
  ]
2693
 
2694
  [[package]]
2695
- name = "peewee"
2696
- version = "3.18.3"
2697
  source = { registry = "https://pypi.org/simple" }
2698
- sdist = { url = "https://files.pythonhosted.org/packages/6f/60/58e7a307a24044e0e982b99042fcd5a58d0cd928d9c01829574d7553ee8d/peewee-3.18.3.tar.gz", hash = "sha256:62c3d93315b1a909360c4b43c3a573b47557a1ec7a4583a71286df2a28d4b72e", size = 3026296, upload-time = "2025-11-03T16:43:46.678Z" }
 
 
 
 
 
 
 
 
 
2699
 
2700
  [[package]]
2701
- name = "pexpect"
2702
- version = "4.9.0"
2703
  source = { registry = "https://pypi.org/simple" }
2704
  dependencies = [
2705
- { name = "ptyprocess" },
 
 
 
2706
  ]
2707
- sdist = { url = "https://files.pythonhosted.org/packages/42/92/cc564bf6381ff43ce1f4d06852fc19a2f11d180f23dc32d9588bee2f149d/pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f", size = 166450, upload-time = "2023-11-25T09:07:26.339Z" }
2708
  wheels = [
2709
- { url = "https://files.pythonhosted.org/packages/9e/c3/059298687310d527a58bb01f3b1965787ee3b40dce76752eda8b44e9a2c5/pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523", size = 63772, upload-time = "2023-11-25T06:56:14.81Z" },
2710
  ]
2711
 
2712
  [[package]]
2713
- name = "pillow"
2714
- version = "12.0.0"
2715
  source = { registry = "https://pypi.org/simple" }
2716
- sdist = { url = "https://files.pythonhosted.org/packages/5a/b0/cace85a1b0c9775a9f8f5d5423c8261c858760e2466c79b2dd184638b056/pillow-12.0.0.tar.gz", hash = "sha256:87d4f8125c9988bfbed67af47dd7a953e2fc7b0cc1e7800ec6d2080d490bb353", size = 47008828, upload-time = "2025-10-15T18:24:14.008Z" }
 
 
 
 
 
 
2717
  wheels = [
2718
- { url = "https://files.pythonhosted.org/packages/2c/90/4fcce2c22caf044e660a198d740e7fbc14395619e3cb1abad12192c0826c/pillow-12.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:53561a4ddc36facb432fae7a9d8afbfaf94795414f5cdc5fc52f28c1dca90371", size = 5249377, upload-time = "2025-10-15T18:22:05.993Z" },
2719
- { url = "https://files.pythonhosted.org/packages/fd/e0/ed960067543d080691d47d6938ebccbf3976a931c9567ab2fbfab983a5dd/pillow-12.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:71db6b4c1653045dacc1585c1b0d184004f0d7e694c7b34ac165ca70c0838082", size = 4650343, upload-time = "2025-10-15T18:22:07.718Z" },
2720
- { url = "https://files.pythonhosted.org/packages/e7/a1/f81fdeddcb99c044bf7d6faa47e12850f13cee0849537a7d27eeab5534d4/pillow-12.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2fa5f0b6716fc88f11380b88b31fe591a06c6315e955c096c35715788b339e3f", size = 6232981, upload-time = "2025-10-15T18:22:09.287Z" },
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
2721
  { url = "https://files.pythonhosted.org/packages/88/e1/9098d3ce341a8750b55b0e00c03f1630d6178f38ac191c81c97a3b047b44/pillow-12.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:82240051c6ca513c616f7f9da06e871f61bfd7805f566275841af15015b8f98d", size = 8041399, upload-time = "2025-10-15T18:22:10.872Z" },
2722
  { url = "https://files.pythonhosted.org/packages/a7/62/a22e8d3b602ae8cc01446d0c57a54e982737f44b6f2e1e019a925143771d/pillow-12.0.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:55f818bd74fe2f11d4d7cbc65880a843c4075e0ac7226bc1a23261dbea531953", size = 6347740, upload-time = "2025-10-15T18:22:12.769Z" },
2723
  { url = "https://files.pythonhosted.org/packages/4f/87/424511bdcd02c8d7acf9f65caa09f291a519b16bd83c3fb3374b3d4ae951/pillow-12.0.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b87843e225e74576437fd5b6a4c2205d422754f84a06942cfaf1dc32243e45a8", size = 7040201, upload-time = "2025-10-15T18:22:14.813Z" },
@@ -3892,6 +4596,65 @@ wheels = [
3892
  { url = "https://files.pythonhosted.org/packages/d0/30/dc54f88dd4a2b5dc8a0279bdd7270e735851848b762aeb1c1184ed1f6b14/tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2", size = 78540, upload-time = "2024-11-24T20:12:19.698Z" },
3893
  ]
3894
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
3895
  [[package]]
3896
  name = "traitlets"
3897
  version = "5.14.3"
 
13
  { name = "asyncio" },
14
  { name = "autogen-agentchat" },
15
  { name = "autogen-ext", extra = ["grpc", "mcp", "ollama", "openai"] },
16
+ { name = "azure-identity" },
17
  { name = "beautifulsoup4" },
18
  { name = "chromadb" },
19
  { name = "datasets" },
 
36
  { name = "langchain-ollama" },
37
  { name = "langchain-openai" },
38
  { name = "langchain-text-splitters" },
 
39
  { name = "langgraph" },
40
  { name = "langgraph-checkpoint-sqlite" },
41
  { name = "langsmith" },
 
46
  { name = "openai" },
47
  { name = "openai-agents" },
48
  { name = "openai-whisper" },
49
+ { name = "openinference-instrumentation-autogen" },
50
+ { name = "openinference-instrumentation-openai" },
51
+ { name = "opentelemetry-api" },
52
+ { name = "opentelemetry-exporter-otlp" },
53
+ { name = "opentelemetry-sdk" },
54
  { name = "playwright" },
55
  { name = "plotly" },
56
  { name = "polygon-api-client" },
 
68
  { name = "speedtest-cli" },
69
  { name = "streamlit" },
70
  { name = "textblob" },
71
+ { name = "traceloop-sdk" },
72
  { name = "wikipedia" },
73
  { name = "yfinance" },
74
  { name = "yt-dlp" },
 
89
  { name = "asyncio" },
90
  { name = "autogen-agentchat", specifier = ">=0.7.5" },
91
  { name = "autogen-ext", extras = ["grpc", "mcp", "ollama", "openai"], specifier = ">=0.7.5" },
92
+ { name = "azure-identity", specifier = ">=1.25.1" },
93
  { name = "beautifulsoup4", specifier = ">=4.12.3" },
94
  { name = "chromadb", specifier = "==1.3.5" },
95
  { name = "datasets", specifier = ">=4.4.1" },
 
112
  { name = "langchain-ollama", specifier = ">=1.0.0" },
113
  { name = "langchain-openai", specifier = ">=1.0.3" },
114
  { name = "langchain-text-splitters", specifier = ">=1.0.0" },
 
115
  { name = "langgraph", specifier = ">=1.0.3" },
116
  { name = "langgraph-checkpoint-sqlite", specifier = ">=3.0.0" },
117
  { name = "langsmith", specifier = ">=0.4.43" },
 
122
  { name = "openai", specifier = ">=2.8.1" },
123
  { name = "openai-agents", specifier = ">=0.5.1" },
124
  { name = "openai-whisper", specifier = ">=1.0.0" },
125
+ { name = "openinference-instrumentation-autogen", specifier = ">=0.1.0" },
126
+ { name = "openinference-instrumentation-openai", specifier = ">=0.1.15" },
127
+ { name = "opentelemetry-api", specifier = ">=1.20.0" },
128
+ { name = "opentelemetry-exporter-otlp", specifier = ">=1.20.0" },
129
+ { name = "opentelemetry-sdk", specifier = ">=1.20.0" },
130
  { name = "playwright", specifier = ">=1.51.0" },
131
  { name = "plotly", specifier = ">=6.5.0" },
132
  { name = "polygon-api-client", specifier = ">=1.16.3" },
 
144
  { name = "speedtest-cli", specifier = ">=2.1.3" },
145
  { name = "streamlit", specifier = ">=1.51.0" },
146
  { name = "textblob", specifier = ">=0.17.1" },
147
+ { name = "traceloop-sdk", specifier = ">=0.33.0" },
148
  { name = "wikipedia", specifier = ">=1.4.0" },
149
  { name = "yfinance", specifier = ">=0.2.66" },
150
  { name = "yt-dlp", specifier = ">=2025.11.12" },
 
417
  { name = "tiktoken" },
418
  ]
419
 
420
+ [[package]]
421
+ name = "azure-core"
422
+ version = "1.37.0"
423
+ source = { registry = "https://pypi.org/simple" }
424
+ dependencies = [
425
+ { name = "requests" },
426
+ { name = "typing-extensions" },
427
+ ]
428
+ sdist = { url = "https://files.pythonhosted.org/packages/ef/83/41c9371c8298999c67b007e308a0a3c4d6a59c6908fa9c62101f031f886f/azure_core-1.37.0.tar.gz", hash = "sha256:7064f2c11e4b97f340e8e8c6d923b822978be3016e46b7bc4aa4b337cfb48aee", size = 357620, upload-time = "2025-12-11T20:05:13.518Z" }
429
+ wheels = [
430
+ { url = "https://files.pythonhosted.org/packages/ee/34/a9914e676971a13d6cc671b1ed172f9804b50a3a80a143ff196e52f4c7ee/azure_core-1.37.0-py3-none-any.whl", hash = "sha256:b3abe2c59e7d6bb18b38c275a5029ff80f98990e7c90a5e646249a56630fcc19", size = 214006, upload-time = "2025-12-11T20:05:14.96Z" },
431
+ ]
432
+
433
+ [[package]]
434
+ name = "azure-identity"
435
+ version = "1.25.1"
436
+ source = { registry = "https://pypi.org/simple" }
437
+ dependencies = [
438
+ { name = "azure-core" },
439
+ { name = "cryptography" },
440
+ { name = "msal" },
441
+ { name = "msal-extensions" },
442
+ { name = "typing-extensions" },
443
+ ]
444
+ sdist = { url = "https://files.pythonhosted.org/packages/06/8d/1a6c41c28a37eab26dc85ab6c86992c700cd3f4a597d9ed174b0e9c69489/azure_identity-1.25.1.tar.gz", hash = "sha256:87ca8328883de6036443e1c37b40e8dc8fb74898240f61071e09d2e369361456", size = 279826, upload-time = "2025-10-06T20:30:02.194Z" }
445
+ wheels = [
446
+ { url = "https://files.pythonhosted.org/packages/83/7b/5652771e24fff12da9dde4c20ecf4682e606b104f26419d139758cc935a6/azure_identity-1.25.1-py3-none-any.whl", hash = "sha256:e9edd720af03dff020223cd269fa3a61e8f345ea75443858273bcb44844ab651", size = 191317, upload-time = "2025-10-06T20:30:04.251Z" },
447
+ ]
448
+
449
  [[package]]
450
  name = "backoff"
451
  version = "2.2.1"
 
754
  { url = "https://files.pythonhosted.org/packages/e8/cb/2da4cc83f5edb9c3257d09e1e7ab7b23f049c7962cae8d842bbef0a9cec9/cryptography-46.0.3-cp38-abi3-win_arm64.whl", hash = "sha256:d89c3468de4cdc4f08a57e214384d0471911a3830fcdaf7a8cc587e42a866372", size = 2918740, upload-time = "2025-10-15T23:18:12.277Z" },
755
  ]
756
 
757
+ [[package]]
758
+ name = "cuid"
759
+ version = "0.4"
760
+ source = { registry = "https://pypi.org/simple" }
761
+ sdist = { url = "https://files.pythonhosted.org/packages/55/ca/d323556e2bf9bfb63219fbb849ce61bb830cc42d1b25b91cde3815451b91/cuid-0.4.tar.gz", hash = "sha256:74eaba154916a2240405c3631acee708c263ef8fa05a86820b87d0f59f84e978", size = 4986, upload-time = "2023-03-06T00:41:12.708Z" }
762
+
763
  [[package]]
764
  name = "curl-cffi"
765
  version = "0.13.0"
 
857
  { url = "https://files.pythonhosted.org/packages/4e/8c/f3147f5c4b73e7550fe5f9352eaa956ae838d5c51eb58e7a25b9f3e2643b/decorator-5.2.1-py3-none-any.whl", hash = "sha256:d316bb415a2d9e2d2b3abcc4084c6502fc09240e292cd76a76afc106a1c8e04a", size = 9190, upload-time = "2025-02-24T04:41:32.565Z" },
858
  ]
859
 
860
+ [[package]]
861
+ name = "deprecated"
862
+ version = "1.3.1"
863
+ source = { registry = "https://pypi.org/simple" }
864
+ dependencies = [
865
+ { name = "wrapt" },
866
+ ]
867
+ sdist = { url = "https://files.pythonhosted.org/packages/49/85/12f0a49a7c4ffb70572b6c2ef13c90c88fd190debda93b23f026b25f9634/deprecated-1.3.1.tar.gz", hash = "sha256:b1b50e0ff0c1fddaa5708a2c6b0a6588bb09b892825ab2b214ac9ea9d92a5223", size = 2932523, upload-time = "2025-10-30T08:19:02.757Z" }
868
+ wheels = [
869
+ { url = "https://files.pythonhosted.org/packages/84/d0/205d54408c08b13550c733c4b85429e7ead111c7f0014309637425520a9a/deprecated-1.3.1-py2.py3-none-any.whl", hash = "sha256:597bfef186b6f60181535a29fbe44865ce137a5079f295b479886c82729d5f3f", size = 11298, upload-time = "2025-10-30T08:19:00.758Z" },
870
+ ]
871
+
872
  [[package]]
873
  name = "dill"
874
  version = "0.4.0"
 
1436
  { url = "https://files.pythonhosted.org/packages/a4/ed/1f1afb2e9e7f38a545d628f864d562a5ae64fe6f7a10e28ffb9b185b4e89/importlib_resources-6.5.2-py3-none-any.whl", hash = "sha256:789cfdc3ed28c78b67a06acb8126751ced69a3d5f79c095a98298cd8a760ccec", size = 37461, upload-time = "2025-01-03T18:51:54.306Z" },
1437
  ]
1438
 
1439
+ [[package]]
1440
+ name = "inflection"
1441
+ version = "0.5.1"
1442
+ source = { registry = "https://pypi.org/simple" }
1443
+ sdist = { url = "https://files.pythonhosted.org/packages/e1/7e/691d061b7329bc8d54edbf0ec22fbfb2afe61facb681f9aaa9bff7a27d04/inflection-0.5.1.tar.gz", hash = "sha256:1a29730d366e996aaacffb2f1f1cb9593dc38e2ddd30c91250c6dde09ea9b417", size = 15091, upload-time = "2020-08-22T08:16:29.139Z" }
1444
+ wheels = [
1445
+ { url = "https://files.pythonhosted.org/packages/59/91/aa6bde563e0085a02a435aa99b49ef75b0a4b062635e606dab23ce18d720/inflection-0.5.1-py2.py3-none-any.whl", hash = "sha256:f38b2b640938a4f35ade69ac3d053042959b62a0f1076a5bbaa1b9526605a8a2", size = 9454, upload-time = "2020-08-22T08:16:27.816Z" },
1446
+ ]
1447
+
1448
  [[package]]
1449
  name = "iniconfig"
1450
  version = "2.3.0"
 
1859
  { url = "https://files.pythonhosted.org/packages/1e/97/d362353ab04f865af6f81d4d46e7aa428734aa032de0017934b771fc34b7/langchain_text_splitters-1.0.0-py3-none-any.whl", hash = "sha256:f00c8219d3468f2c5bd951b708b6a7dd9bc3c62d0cfb83124c377f7170f33b2e", size = 33851, upload-time = "2025-10-17T14:33:40.46Z" },
1860
  ]
1861
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1862
  [[package]]
1863
  name = "langgraph"
1864
  version = "1.0.3"
 
2171
  { url = "https://files.pythonhosted.org/packages/43/e3/7d92a15f894aa0c9c4b49b8ee9ac9850d6e63b03c9c32c0367a13ae62209/mpmath-1.3.0-py3-none-any.whl", hash = "sha256:a0b2b9fe80bbcd81a6647ff13108738cfb482d481d826cc0e02f5b35e5c88d2c", size = 536198, upload-time = "2023-03-07T16:47:09.197Z" },
2172
  ]
2173
 
2174
+ [[package]]
2175
+ name = "msal"
2176
+ version = "1.34.0"
2177
+ source = { registry = "https://pypi.org/simple" }
2178
+ dependencies = [
2179
+ { name = "cryptography" },
2180
+ { name = "pyjwt", extra = ["crypto"] },
2181
+ { name = "requests" },
2182
+ ]
2183
+ sdist = { url = "https://files.pythonhosted.org/packages/cf/0e/c857c46d653e104019a84f22d4494f2119b4fe9f896c92b4b864b3b045cc/msal-1.34.0.tar.gz", hash = "sha256:76ba83b716ea5a6d75b0279c0ac353a0e05b820ca1f6682c0eb7f45190c43c2f", size = 153961, upload-time = "2025-09-22T23:05:48.989Z" }
2184
+ wheels = [
2185
+ { url = "https://files.pythonhosted.org/packages/c2/dc/18d48843499e278538890dc709e9ee3dea8375f8be8e82682851df1b48b5/msal-1.34.0-py3-none-any.whl", hash = "sha256:f669b1644e4950115da7a176441b0e13ec2975c29528d8b9e81316023676d6e1", size = 116987, upload-time = "2025-09-22T23:05:47.294Z" },
2186
+ ]
2187
+
2188
+ [[package]]
2189
+ name = "msal-extensions"
2190
+ version = "1.3.1"
2191
+ source = { registry = "https://pypi.org/simple" }
2192
+ dependencies = [
2193
+ { name = "msal" },
2194
+ ]
2195
+ sdist = { url = "https://files.pythonhosted.org/packages/01/99/5d239b6156eddf761a636bded1118414d161bd6b7b37a9335549ed159396/msal_extensions-1.3.1.tar.gz", hash = "sha256:c5b0fd10f65ef62b5f1d62f4251d51cbcaf003fcedae8c91b040a488614be1a4", size = 23315, upload-time = "2025-03-14T23:51:03.902Z" }
2196
+ wheels = [
2197
+ { url = "https://files.pythonhosted.org/packages/5e/75/bd9b7bb966668920f06b200e84454c8f3566b102183bc55c5473d96cb2b9/msal_extensions-1.3.1-py3-none-any.whl", hash = "sha256:96d3de4d034504e969ac5e85bae8106c8373b5c6568e4c8fa7af2eca9dbe6bca", size = 20583, upload-time = "2025-03-14T23:51:03.016Z" },
2198
+ ]
2199
+
2200
  [[package]]
2201
  name = "multidict"
2202
  version = "6.7.0"
 
2561
  ]
2562
  sdist = { url = "https://files.pythonhosted.org/packages/35/8e/d36f8880bcf18ec026a55807d02fe4c7357da9f25aebd92f85178000c0dc/openai_whisper-20250625.tar.gz", hash = "sha256:37a91a3921809d9f44748ffc73c0a55c9f366c85a3ef5c2ae0cc09540432eb96", size = 803191, upload-time = "2025-06-26T01:06:13.34Z" }
2563
 
2564
+ [[package]]
2565
+ name = "openinference-instrumentation"
2566
+ version = "0.1.42"
2567
+ source = { registry = "https://pypi.org/simple" }
2568
+ dependencies = [
2569
+ { name = "openinference-semantic-conventions" },
2570
+ { name = "opentelemetry-api" },
2571
+ { name = "opentelemetry-sdk" },
2572
+ { name = "wrapt" },
2573
+ ]
2574
+ sdist = { url = "https://files.pythonhosted.org/packages/00/d0/b19061a21fd6127d2857c77744a36073bba9c1502d1d5e8517b708eb8b7c/openinference_instrumentation-0.1.42.tar.gz", hash = "sha256:2275babc34022e151b5492cfba41d3b12e28377f8e08cb45e5d64fe2d9d7fe37", size = 23954, upload-time = "2025-11-05T01:37:46.869Z" }
2575
+ wheels = [
2576
+ { url = "https://files.pythonhosted.org/packages/c3/71/43ee4616fc95dbd2f560550f199c6652a5eb93f84e8aa0039bc95c19cfe0/openinference_instrumentation-0.1.42-py3-none-any.whl", hash = "sha256:e7521ff90833ef7cc65db526a2f59b76a496180abeaaee30ec6abbbc0b43f8ec", size = 30086, upload-time = "2025-11-05T01:37:43.866Z" },
2577
+ ]
2578
+
2579
+ [[package]]
2580
+ name = "openinference-instrumentation-autogen"
2581
+ version = "0.1.10"
2582
+ source = { registry = "https://pypi.org/simple" }
2583
+ dependencies = [
2584
+ { name = "openinference-instrumentation" },
2585
+ { name = "openinference-semantic-conventions" },
2586
+ { name = "opentelemetry-api" },
2587
+ { name = "opentelemetry-instrumentation" },
2588
+ { name = "opentelemetry-semantic-conventions" },
2589
+ ]
2590
+ sdist = { url = "https://files.pythonhosted.org/packages/a3/fd/80658027b9209aa41b41f1180b5649ceb5b48981467eb7d061b266a1812e/openinference_instrumentation_autogen-0.1.10.tar.gz", hash = "sha256:f32b3a375bdbec789f3510002b358ccfecc9d1654e634915394161de1b3f6419", size = 7222, upload-time = "2025-10-10T03:49:06.06Z" }
2591
+ wheels = [
2592
+ { url = "https://files.pythonhosted.org/packages/07/91/c22730b1a08541ab71a44bfb76ce83835fc6bbc83bee9010a5f7fd8a1608/openinference_instrumentation_autogen-0.1.10-py3-none-any.whl", hash = "sha256:e8ec802b772d9abea7f870004bc220a36c7717266e37c5a376bba96e7a7b68c6", size = 8252, upload-time = "2025-10-10T03:48:51.228Z" },
2593
+ ]
2594
+
2595
+ [[package]]
2596
+ name = "openinference-instrumentation-openai"
2597
+ version = "0.1.41"
2598
+ source = { registry = "https://pypi.org/simple" }
2599
+ dependencies = [
2600
+ { name = "openinference-instrumentation" },
2601
+ { name = "openinference-semantic-conventions" },
2602
+ { name = "opentelemetry-api" },
2603
+ { name = "opentelemetry-instrumentation" },
2604
+ { name = "opentelemetry-semantic-conventions" },
2605
+ { name = "typing-extensions" },
2606
+ { name = "wrapt" },
2607
+ ]
2608
+ sdist = { url = "https://files.pythonhosted.org/packages/66/06/77b2fe7171336f71313936daf1b644a9968da85ff0b473a03ca05cc3d5c1/openinference_instrumentation_openai-0.1.41.tar.gz", hash = "sha256:ef4db680986a613b1639720f9beaa315c9e388c20bc985dbbbdf0f4df007c6e9", size = 22848, upload-time = "2025-12-04T19:58:35.349Z" }
2609
+ wheels = [
2610
+ { url = "https://files.pythonhosted.org/packages/a1/db/48f1f540d335f98fa67891e9c25ad56020be7e7b2c0d4fd5014875fe5ddf/openinference_instrumentation_openai-0.1.41-py3-none-any.whl", hash = "sha256:6fad453446835e51333b660882eacababbf1052689ca53cba444a7d97fa2e910", size = 30273, upload-time = "2025-12-04T19:58:34.17Z" },
2611
+ ]
2612
+
2613
+ [[package]]
2614
+ name = "openinference-semantic-conventions"
2615
+ version = "0.1.25"
2616
+ source = { registry = "https://pypi.org/simple" }
2617
+ sdist = { url = "https://files.pythonhosted.org/packages/0b/68/81c8a0b90334ff11e4f285e4934c57f30bea3ef0c0b9f99b65e7b80fae3b/openinference_semantic_conventions-0.1.25.tar.gz", hash = "sha256:f0a8c2cfbd00195d1f362b4803518341e80867d446c2959bf1743f1894fce31d", size = 12767, upload-time = "2025-11-05T01:37:45.89Z" }
2618
+ wheels = [
2619
+ { url = "https://files.pythonhosted.org/packages/fd/3d/dd14ee2eb8a3f3054249562e76b253a1545c76adbbfd43a294f71acde5c3/openinference_semantic_conventions-0.1.25-py3-none-any.whl", hash = "sha256:3814240f3bd61f05d9562b761de70ee793d55b03bca1634edf57d7a2735af238", size = 10395, upload-time = "2025-11-05T01:37:43.697Z" },
2620
+ ]
2621
+
2622
  [[package]]
2623
  name = "opentelemetry-api"
2624
  version = "1.38.0"
 
2632
  { url = "https://files.pythonhosted.org/packages/ae/a2/d86e01c28300bd41bab8f18afd613676e2bd63515417b77636fc1add426f/opentelemetry_api-1.38.0-py3-none-any.whl", hash = "sha256:2891b0197f47124454ab9f0cf58f3be33faca394457ac3e09daba13ff50aa582", size = 65947, upload-time = "2025-10-16T08:35:30.23Z" },
2633
  ]
2634
 
2635
+ [[package]]
2636
+ name = "opentelemetry-exporter-otlp"
2637
+ version = "1.38.0"
2638
+ source = { registry = "https://pypi.org/simple" }
2639
+ dependencies = [
2640
+ { name = "opentelemetry-exporter-otlp-proto-grpc" },
2641
+ { name = "opentelemetry-exporter-otlp-proto-http" },
2642
+ ]
2643
+ sdist = { url = "https://files.pythonhosted.org/packages/c2/2d/16e3487ddde2dee702bd746dd41950a8789b846d22a1c7e64824aac5ebea/opentelemetry_exporter_otlp-1.38.0.tar.gz", hash = "sha256:2f55acdd475e4136117eff20fbf1b9488b1b0b665ab64407516e1ac06f9c3f9d", size = 6147, upload-time = "2025-10-16T08:35:52.53Z" }
2644
+ wheels = [
2645
+ { url = "https://files.pythonhosted.org/packages/fd/8a/81cd252b16b7d95ec1147982b6af81c7932d23918b4c3b15372531242ddd/opentelemetry_exporter_otlp-1.38.0-py3-none-any.whl", hash = "sha256:bc6562cef229fac8887ed7109fc5abc52315f39d9c03fd487bb8b4ef8fbbc231", size = 7018, upload-time = "2025-10-16T08:35:32.995Z" },
2646
+ ]
2647
+
2648
  [[package]]
2649
  name = "opentelemetry-exporter-otlp-proto-common"
2650
  version = "1.38.0"
 
2709
  ]
2710
 
2711
  [[package]]
2712
+ name = "opentelemetry-instrumentation-agno"
2713
+ version = "0.49.8"
2714
  source = { registry = "https://pypi.org/simple" }
2715
  dependencies = [
2716
+ { name = "opentelemetry-api" },
2717
+ { name = "opentelemetry-instrumentation" },
2718
+ { name = "opentelemetry-semantic-conventions" },
2719
+ { name = "opentelemetry-semantic-conventions-ai" },
2720
  ]
2721
+ sdist = { url = "https://files.pythonhosted.org/packages/b3/d9/1e3b82f9f5068b60d60b2453b1e53a2fdc5dbb6a4754a5129b040483d1b7/opentelemetry_instrumentation_agno-0.49.8.tar.gz", hash = "sha256:0e15a5cade29b59f5c988d781f4b69cad87251da2bb29f4bfe95d3b8ad2ac8de", size = 5795, upload-time = "2025-12-11T20:32:37.318Z" }
2722
  wheels = [
2723
+ { url = "https://files.pythonhosted.org/packages/79/b8/788ef8aa896ad5b7d3ee0929a2fddeb144f3f60c9cd06c93e976a99cff54/opentelemetry_instrumentation_agno-0.49.8-py3-none-any.whl", hash = "sha256:85e64a46bd0407dc46300c581bbf584dc503986cf430cfb0e8435298913be604", size = 8945, upload-time = "2025-12-11T20:31:55.781Z" },
2724
  ]
2725
 
2726
  [[package]]
2727
+ name = "opentelemetry-instrumentation-alephalpha"
2728
+ version = "0.49.8"
2729
  source = { registry = "https://pypi.org/simple" }
2730
  dependencies = [
2731
  { name = "opentelemetry-api" },
2732
+ { name = "opentelemetry-instrumentation" },
2733
  { name = "opentelemetry-semantic-conventions" },
2734
+ { name = "opentelemetry-semantic-conventions-ai" },
2735
  ]
2736
+ sdist = { url = "https://files.pythonhosted.org/packages/6f/dd/a4a0076ed14126b5e4146e280a5e85d578cbdd56429faae07ab38927536b/opentelemetry_instrumentation_alephalpha-0.49.8.tar.gz", hash = "sha256:50caee96f7e72d9cbb1a267e2e9b0d36a02372e5daaf16efcb74da9c5423f4b6", size = 5393, upload-time = "2025-12-11T20:32:38.017Z" }
2737
  wheels = [
2738
+ { url = "https://files.pythonhosted.org/packages/6a/aa/bfdd4811ea9465aa7bfdfd32983d0b698e9a307c4ddcf8e4e72ecf0270c1/opentelemetry_instrumentation_alephalpha-0.49.8-py3-none-any.whl", hash = "sha256:abd9d22443a1c1c0216428cd97f1b412391331db16b86f75ebc31a293c0fed1a", size = 8067, upload-time = "2025-12-11T20:31:56.844Z" },
2739
  ]
2740
 
2741
  [[package]]
2742
+ name = "opentelemetry-instrumentation-anthropic"
2743
+ version = "0.49.8"
2744
  source = { registry = "https://pypi.org/simple" }
2745
  dependencies = [
2746
  { name = "opentelemetry-api" },
2747
+ { name = "opentelemetry-instrumentation" },
2748
+ { name = "opentelemetry-semantic-conventions" },
2749
+ { name = "opentelemetry-semantic-conventions-ai" },
2750
  ]
2751
+ sdist = { url = "https://files.pythonhosted.org/packages/8e/29/953b7fff0c96aa6c6230bf8dcab3ad24387e934fb538eaf94338fd0d4ce1/opentelemetry_instrumentation_anthropic-0.49.8.tar.gz", hash = "sha256:e03c88dd55ec620fa5bbe0f6d93dc96e7547defd7451a593c094a4976da53149", size = 14925, upload-time = "2025-12-11T20:32:38.718Z" }
2752
  wheels = [
2753
+ { url = "https://files.pythonhosted.org/packages/6a/6d/873542bd500d8d2975b2d80123bbfcae64ff5abedbb6cd2444bfa15ad4d7/opentelemetry_instrumentation_anthropic-0.49.8-py3-none-any.whl", hash = "sha256:7b1170557be421fa4aa3da58c98752790127ea31139e0c58dabcf1562a58a492", size = 18462, upload-time = "2025-12-11T20:31:58.871Z" },
2754
  ]
2755
 
2756
  [[package]]
2757
+ name = "opentelemetry-instrumentation-bedrock"
2758
+ version = "0.49.8"
2759
  source = { registry = "https://pypi.org/simple" }
2760
+ dependencies = [
2761
+ { name = "anthropic" },
2762
+ { name = "opentelemetry-api" },
2763
+ { name = "opentelemetry-instrumentation" },
2764
+ { name = "opentelemetry-semantic-conventions" },
2765
+ { name = "opentelemetry-semantic-conventions-ai" },
2766
+ { name = "tokenizers" },
2767
+ ]
2768
+ sdist = { url = "https://files.pythonhosted.org/packages/45/97/91dc2b007ab852546347e87cb5cdec4fbdc76bbcd55dc9afd40dba71c34a/opentelemetry_instrumentation_bedrock-0.49.8.tar.gz", hash = "sha256:5902e861c0cae1a6d23de18375d5bed00bb865cd7543fe3c501e7914b0174081", size = 15323, upload-time = "2025-12-11T20:32:39.471Z" }
2769
  wheels = [
2770
+ { url = "https://files.pythonhosted.org/packages/6f/b4/f4bb6e1a158d9d51da4e865b83d05ecbfc3fc1aa796ca820697a4a0be38f/opentelemetry_instrumentation_bedrock-0.49.8-py3-none-any.whl", hash = "sha256:58bbed9f3b4acfcf2530f4521237368bf28c62b29b5e0b3fba03fdceda01f5cb", size = 19046, upload-time = "2025-12-11T20:31:59.935Z" },
 
 
 
 
 
 
 
 
 
 
 
 
 
 
2771
  ]
2772
 
2773
  [[package]]
2774
+ name = "opentelemetry-instrumentation-chromadb"
2775
+ version = "0.49.8"
2776
  source = { registry = "https://pypi.org/simple" }
2777
+ dependencies = [
2778
+ { name = "opentelemetry-api" },
2779
+ { name = "opentelemetry-instrumentation" },
2780
+ { name = "opentelemetry-semantic-conventions" },
2781
+ { name = "opentelemetry-semantic-conventions-ai" },
2782
+ ]
2783
+ sdist = { url = "https://files.pythonhosted.org/packages/f8/d4/100565579ead39ba56fcb20de6652fafe134062c835e879af97e2d652978/opentelemetry_instrumentation_chromadb-0.49.8.tar.gz", hash = "sha256:c3d772539988fb65e5c64a40cf314025461a384349a7821cabf5b0f333b6ea96", size = 4394, upload-time = "2025-12-11T20:32:40.936Z" }
2784
  wheels = [
2785
+ { url = "https://files.pythonhosted.org/packages/7d/6f/6417209d8fe32d40972e0fc44e742897d3e3cb0fb45309af10b72b6fa4df/opentelemetry_instrumentation_chromadb-0.49.8-py3-none-any.whl", hash = "sha256:45a48a6829b1167749d3fecb961aa238bd611ccba96b032178b6c3d2e686ae41", size = 6301, upload-time = "2025-12-11T20:32:00.878Z" },
 
 
 
 
 
 
 
 
2786
  ]
2787
 
2788
  [[package]]
2789
+ name = "opentelemetry-instrumentation-cohere"
2790
+ version = "0.49.8"
2791
  source = { registry = "https://pypi.org/simple" }
2792
+ dependencies = [
2793
+ { name = "opentelemetry-api" },
2794
+ { name = "opentelemetry-instrumentation" },
2795
+ { name = "opentelemetry-semantic-conventions" },
2796
+ { name = "opentelemetry-semantic-conventions-ai" },
2797
+ ]
2798
+ sdist = { url = "https://files.pythonhosted.org/packages/c9/61/bd8ee6a5bf63757e00e7ac2455b4bc8e9af66db9d945c1ccc123b3319449/opentelemetry_instrumentation_cohere-0.49.8.tar.gz", hash = "sha256:a1e5d82eba59399fc65d505d5b029ed750129376c6c346afb74ad1e988a4ba65", size = 9269, upload-time = "2025-12-11T20:32:41.695Z" }
2799
  wheels = [
2800
+ { url = "https://files.pythonhosted.org/packages/b6/8c/23a969343a9f8ea1da07f1d73bf11623bce245b69861e3a498c8186bd37f/opentelemetry_instrumentation_cohere-0.49.8-py3-none-any.whl", hash = "sha256:257df1f02199aeff74b655e5afed9131f7967995c4d4460bc2c2c3c06c69aab5", size = 12201, upload-time = "2025-12-11T20:32:02.72Z" },
2801
  ]
2802
 
2803
  [[package]]
2804
+ name = "opentelemetry-instrumentation-crewai"
2805
+ version = "0.49.8"
2806
  source = { registry = "https://pypi.org/simple" }
2807
+ dependencies = [
2808
+ { name = "opentelemetry-api" },
2809
+ { name = "opentelemetry-instrumentation" },
2810
+ { name = "opentelemetry-semantic-conventions" },
2811
+ { name = "opentelemetry-semantic-conventions-ai" },
2812
+ ]
2813
+ sdist = { url = "https://files.pythonhosted.org/packages/71/ee/b4ff05d1480d8f1d70f3c201c84994257bbae1935d6294a4a80c5b4f6878/opentelemetry_instrumentation_crewai-0.49.8.tar.gz", hash = "sha256:2fff2a4d545d5cc2910d0e2769d955322bd140f35ed72c9d9b3e3542b152a856", size = 4672, upload-time = "2025-12-11T20:32:42.781Z" }
2814
  wheels = [
2815
+ { url = "https://files.pythonhosted.org/packages/61/01/3793d4551bcd1c7c2a66e3fe4e5bfadc8c73b68383e37655519e6ea916c7/opentelemetry_instrumentation_crewai-0.49.8-py3-none-any.whl", hash = "sha256:4e93af93866833f099e992c8a064b4b1c1ee3830243f2a5f3bfac1a06e444b11", size = 6232, upload-time = "2025-12-11T20:32:03.988Z" },
2816
  ]
2817
 
2818
  [[package]]
2819
+ name = "opentelemetry-instrumentation-google-generativeai"
2820
+ version = "0.49.8"
2821
  source = { registry = "https://pypi.org/simple" }
2822
  dependencies = [
2823
+ { name = "opentelemetry-api" },
2824
+ { name = "opentelemetry-instrumentation" },
2825
+ { name = "opentelemetry-semantic-conventions" },
2826
+ { name = "opentelemetry-semantic-conventions-ai" },
2827
  ]
2828
+ sdist = { url = "https://files.pythonhosted.org/packages/3f/52/de754c97dd010d168f32719aeec3bc633f22fafbd8693b6ce2e75de01a75/opentelemetry_instrumentation_google_generativeai-0.49.8.tar.gz", hash = "sha256:ce012f40786a0629c3bc49cb64ed416e0a8fa04db0f6b13f494a1f05d9db6b0b", size = 9203, upload-time = "2025-12-11T20:32:43.497Z" }
2829
  wheels = [
2830
+ { url = "https://files.pythonhosted.org/packages/62/c4/4b05284090c39acf35f0f627389f2a884f5ba428cb2e479a33628ca848b4/opentelemetry_instrumentation_google_generativeai-0.49.8-py3-none-any.whl", hash = "sha256:113cfab3e44493961cce5b262d4eda3a31c6aa68964d009f4e942f0fbc5be233", size = 12054, upload-time = "2025-12-11T20:32:05.992Z" },
 
 
 
 
 
 
2831
  ]
2832
 
2833
  [[package]]
2834
+ name = "opentelemetry-instrumentation-groq"
2835
+ version = "0.49.8"
2836
  source = { registry = "https://pypi.org/simple" }
2837
+ dependencies = [
2838
+ { name = "opentelemetry-api" },
2839
+ { name = "opentelemetry-instrumentation" },
2840
+ { name = "opentelemetry-semantic-conventions" },
2841
+ { name = "opentelemetry-semantic-conventions-ai" },
2842
+ ]
2843
+ sdist = { url = "https://files.pythonhosted.org/packages/8c/57/cf8c8323daa3eadde1b5f2786c612cb6c836d9f8e83aa633eff3a50d8da7/opentelemetry_instrumentation_groq-0.49.8.tar.gz", hash = "sha256:24db0f8362d36146c05812ff166978310e960f0f9e439b27ca5c040ff936512e", size = 8391, upload-time = "2025-12-11T20:32:44.941Z" }
2844
  wheels = [
2845
+ { url = "https://files.pythonhosted.org/packages/70/52/8058d6053e18eb4896e242c00b4d0458ee91a24f57ae6f5bc5e8ef1a7f79/opentelemetry_instrumentation_groq-0.49.8-py3-none-any.whl", hash = "sha256:7d82eaeda2e325ee733148d2f070f77e7090b44a760c3455a6debcbebc3859a8", size = 11009, upload-time = "2025-12-11T20:32:07.062Z" },
2846
  ]
2847
 
2848
  [[package]]
2849
+ name = "opentelemetry-instrumentation-haystack"
2850
+ version = "0.49.8"
2851
  source = { registry = "https://pypi.org/simple" }
2852
+ dependencies = [
2853
+ { name = "opentelemetry-api" },
2854
+ { name = "opentelemetry-instrumentation" },
2855
+ { name = "opentelemetry-semantic-conventions" },
2856
+ { name = "opentelemetry-semantic-conventions-ai" },
2857
+ ]
2858
+ sdist = { url = "https://files.pythonhosted.org/packages/4f/4e/e8dfe6956112fe61d42f3255f5bebd9061fc4e6c4c735591235b0b9f2b16/opentelemetry_instrumentation_haystack-0.49.8.tar.gz", hash = "sha256:0adf19df216d77f424c66bcd69305f6ea2ac65fa0493dccb2b6bbd9ea4d83925", size = 4504, upload-time = "2025-12-11T20:32:45.856Z" }
2859
+ wheels = [
2860
+ { url = "https://files.pythonhosted.org/packages/f0/80/83ad3784bb811924339634b265842fde05a034cfa0c1f897fcbd66670b96/opentelemetry_instrumentation_haystack-0.49.8-py3-none-any.whl", hash = "sha256:cb9452a08dacea8d55d1494e847c6d5fb18a8557cea30c7592078f6f247531f4", size = 7538, upload-time = "2025-12-11T20:32:08.834Z" },
2861
+ ]
2862
 
2863
  [[package]]
2864
+ name = "opentelemetry-instrumentation-lancedb"
2865
+ version = "0.49.8"
2866
  source = { registry = "https://pypi.org/simple" }
2867
  dependencies = [
2868
+ { name = "opentelemetry-api" },
2869
+ { name = "opentelemetry-instrumentation" },
2870
+ { name = "opentelemetry-semantic-conventions" },
2871
+ { name = "opentelemetry-semantic-conventions-ai" },
2872
  ]
2873
+ sdist = { url = "https://files.pythonhosted.org/packages/a1/95/e5db842a45a9ee9376f3021c32d0a171543c8dd63d74db9c7ef74c299115/opentelemetry_instrumentation_lancedb-0.49.8.tar.gz", hash = "sha256:4fc2178bb25bfebfefb3a28093c18f3c1189f97a1427d2f386b7be6c4404fd28", size = 2992, upload-time = "2025-12-11T20:32:46.635Z" }
2874
  wheels = [
2875
+ { url = "https://files.pythonhosted.org/packages/1f/4d/ca287d0dd4033f7a743064600861ebadd1896916fac541979aa156edb2cd/opentelemetry_instrumentation_lancedb-0.49.8-py3-none-any.whl", hash = "sha256:cc361f8caa154712900613e197dc0ec82b07bfd115ab7f7b0ac56e3bfc80cd29", size = 4778, upload-time = "2025-12-11T20:32:09.789Z" },
2876
  ]
2877
 
2878
  [[package]]
2879
+ name = "opentelemetry-instrumentation-langchain"
2880
+ version = "0.49.8"
2881
  source = { registry = "https://pypi.org/simple" }
2882
+ dependencies = [
2883
+ { name = "opentelemetry-api" },
2884
+ { name = "opentelemetry-instrumentation" },
2885
+ { name = "opentelemetry-semantic-conventions" },
2886
+ { name = "opentelemetry-semantic-conventions-ai" },
2887
+ ]
2888
+ sdist = { url = "https://files.pythonhosted.org/packages/6c/8d/a18f65213ccc5b89770f885ae8aba9cf9d25a11c65832bc8339b43d4342e/opentelemetry_instrumentation_langchain-0.49.8.tar.gz", hash = "sha256:607103e42c7819744b8855194ebe7e16e6751491dcffd1c328554f5671bd90c6", size = 15087, upload-time = "2025-12-11T20:32:47.365Z" }
2889
  wheels = [
2890
+ { url = "https://files.pythonhosted.org/packages/66/96/9ea4a8e3807b78b848fdeec5279287d93b9a446af4c4447e2775bfc036a8/opentelemetry_instrumentation_langchain-0.49.8-py3-none-any.whl", hash = "sha256:e36e0b1c41a05f29a652e7bc846d17d17477872b670b03d7bc5d99e672d0fc43", size = 18828, upload-time = "2025-12-11T20:32:10.849Z" },
2891
+ ]
2892
+
2893
+ [[package]]
2894
+ name = "opentelemetry-instrumentation-llamaindex"
2895
+ version = "0.49.8"
2896
+ source = { registry = "https://pypi.org/simple" }
2897
+ dependencies = [
2898
+ { name = "inflection" },
2899
+ { name = "opentelemetry-api" },
2900
+ { name = "opentelemetry-instrumentation" },
2901
+ { name = "opentelemetry-semantic-conventions" },
2902
+ { name = "opentelemetry-semantic-conventions-ai" },
2903
+ ]
2904
+ sdist = { url = "https://files.pythonhosted.org/packages/ad/b4/39fd8d2d081911cbff23274a4a17d4a79693e53cf8a1ff5c6e2ad436ea54/opentelemetry_instrumentation_llamaindex-0.49.8.tar.gz", hash = "sha256:35052225da6f4b6433562fea16936a62ee7531b478628f2cdfa4a476fd8eab27", size = 12224, upload-time = "2025-12-11T20:32:48.15Z" }
2905
+ wheels = [
2906
+ { url = "https://files.pythonhosted.org/packages/ab/ce/c82563fe6fe150dac050d74230fb7f24e08f98a38f7b7cba14e6579a9a4d/opentelemetry_instrumentation_llamaindex-0.49.8-py3-none-any.whl", hash = "sha256:d812ad82ae41114283d1aecac3048ddac8b16a119dd913c753a402d1faeca6c0", size = 21131, upload-time = "2025-12-11T20:32:12.93Z" },
2907
+ ]
2908
+
2909
+ [[package]]
2910
+ name = "opentelemetry-instrumentation-logging"
2911
+ version = "0.59b0"
2912
+ source = { registry = "https://pypi.org/simple" }
2913
+ dependencies = [
2914
+ { name = "opentelemetry-api" },
2915
+ { name = "opentelemetry-instrumentation" },
2916
+ ]
2917
+ sdist = { url = "https://files.pythonhosted.org/packages/be/88/9c5f70fa8b8d96d30be378fc6eb1776e13aea456db15009f4eaef4928847/opentelemetry_instrumentation_logging-0.59b0.tar.gz", hash = "sha256:1b51116444edc74f699daf9002ded61529397100c9bc903c8b9aaa75a5218c76", size = 9969, upload-time = "2025-10-16T08:39:51.653Z" }
2918
+ wheels = [
2919
+ { url = "https://files.pythonhosted.org/packages/2c/a0/340cc45d71437c2f7e27f13c1d2e335b18bbc7a24fd7d174018500b3c7ba/opentelemetry_instrumentation_logging-0.59b0-py3-none-any.whl", hash = "sha256:fdd4eddbd093fc421df8f7d356ecb15b320a1f3396b56bce5543048a5c457eea", size = 12577, upload-time = "2025-10-16T08:38:58.064Z" },
2920
+ ]
2921
+
2922
+ [[package]]
2923
+ name = "opentelemetry-instrumentation-marqo"
2924
+ version = "0.49.8"
2925
+ source = { registry = "https://pypi.org/simple" }
2926
+ dependencies = [
2927
+ { name = "opentelemetry-api" },
2928
+ { name = "opentelemetry-instrumentation" },
2929
+ { name = "opentelemetry-semantic-conventions" },
2930
+ { name = "opentelemetry-semantic-conventions-ai" },
2931
+ ]
2932
+ sdist = { url = "https://files.pythonhosted.org/packages/19/7c/1175e9b8593d293a302d8b32e8129188fa0bd7eac08a1b43170c248fb174/opentelemetry_instrumentation_marqo-0.49.8.tar.gz", hash = "sha256:7e0f6bd4fd2237b9dddaaf159ea87b14a74ee4907584b64678326d733756b8c4", size = 3265, upload-time = "2025-12-11T20:32:48.946Z" }
2933
+ wheels = [
2934
+ { url = "https://files.pythonhosted.org/packages/60/8e/5149eea2a2fc76a6614603188f8a58d9e985a6898955a8d606884f9c970d/opentelemetry_instrumentation_marqo-0.49.8-py3-none-any.whl", hash = "sha256:f26aff5b2c0c8cd1d4af041605bf70d9f5dd729a1117ba929ae75db3bf4ac933", size = 5078, upload-time = "2025-12-11T20:32:14Z" },
2935
+ ]
2936
+
2937
+ [[package]]
2938
+ name = "opentelemetry-instrumentation-mcp"
2939
+ version = "0.49.8"
2940
+ source = { registry = "https://pypi.org/simple" }
2941
+ dependencies = [
2942
+ { name = "opentelemetry-api" },
2943
+ { name = "opentelemetry-instrumentation" },
2944
+ { name = "opentelemetry-semantic-conventions" },
2945
+ { name = "opentelemetry-semantic-conventions-ai" },
2946
+ ]
2947
+ sdist = { url = "https://files.pythonhosted.org/packages/20/86/7a8b4bd935f6c2e281afaa7a326edf75508a9f374db7b5ea0d56fd4ee2ed/opentelemetry_instrumentation_mcp-0.49.8.tar.gz", hash = "sha256:927b46e4735e746244845c136061e77d9ebdefde560c811b881fa431fe8d5783", size = 8727, upload-time = "2025-12-11T20:32:49.844Z" }
2948
+ wheels = [
2949
+ { url = "https://files.pythonhosted.org/packages/8a/d1/d125024c3b5dcceeef9a06aab40f779b69bbd5e74611d9fb6d8672e2c04e/opentelemetry_instrumentation_mcp-0.49.8-py3-none-any.whl", hash = "sha256:4854bd70a9697a410eac2aab26f14d9fb0d21771626ac1009af9327b3407fde4", size = 10522, upload-time = "2025-12-11T20:32:15.842Z" },
2950
+ ]
2951
+
2952
+ [[package]]
2953
+ name = "opentelemetry-instrumentation-milvus"
2954
+ version = "0.49.8"
2955
+ source = { registry = "https://pypi.org/simple" }
2956
+ dependencies = [
2957
+ { name = "opentelemetry-api" },
2958
+ { name = "opentelemetry-instrumentation" },
2959
+ { name = "opentelemetry-semantic-conventions" },
2960
+ { name = "opentelemetry-semantic-conventions-ai" },
2961
+ ]
2962
+ sdist = { url = "https://files.pythonhosted.org/packages/76/d3/6b3c240867e2dc4f11ce65146133b7c0edf07ed95f8db3626e87194c7bef/opentelemetry_instrumentation_milvus-0.49.8.tar.gz", hash = "sha256:09a8f4f9630e4948c1b725f461e66d50994418def16689394a21d830250f3fa5", size = 5267, upload-time = "2025-12-11T20:32:50.907Z" }
2963
+ wheels = [
2964
+ { url = "https://files.pythonhosted.org/packages/f8/03/e52c50970215e562d9de436c282592a1aa14c11de027b883f9d5a1c08e30/opentelemetry_instrumentation_milvus-0.49.8-py3-none-any.whl", hash = "sha256:087f827af9044d3fe4cdde530ff40877b2d676cd02a9f863c950d7f2e1072dc5", size = 7152, upload-time = "2025-12-11T20:32:16.799Z" },
2965
+ ]
2966
+
2967
+ [[package]]
2968
+ name = "opentelemetry-instrumentation-mistralai"
2969
+ version = "0.49.8"
2970
+ source = { registry = "https://pypi.org/simple" }
2971
+ dependencies = [
2972
+ { name = "opentelemetry-api" },
2973
+ { name = "opentelemetry-instrumentation" },
2974
+ { name = "opentelemetry-semantic-conventions" },
2975
+ { name = "opentelemetry-semantic-conventions-ai" },
2976
+ ]
2977
+ sdist = { url = "https://files.pythonhosted.org/packages/5c/fd/6877c366537016f4aa9cc1cd4274ee75d49e0b93dec947a730d74ae218f5/opentelemetry_instrumentation_mistralai-0.49.8.tar.gz", hash = "sha256:b22050dde6a33115342e819a859b4ab9bf55c79dbd161cdb68d3d973c9d98838", size = 6821, upload-time = "2025-12-11T20:32:51.918Z" }
2978
+ wheels = [
2979
+ { url = "https://files.pythonhosted.org/packages/f8/90/8db94c6516ac78e483e417fd63d0e056c54fa0dff020a758da6a4496f6af/opentelemetry_instrumentation_mistralai-0.49.8-py3-none-any.whl", hash = "sha256:f676fba1fd2d826e82cfc7225a53264ed3fc1d7c188ef4077703bc5bfbfaa181", size = 8951, upload-time = "2025-12-11T20:32:17.833Z" },
2980
+ ]
2981
+
2982
+ [[package]]
2983
+ name = "opentelemetry-instrumentation-ollama"
2984
+ version = "0.49.8"
2985
+ source = { registry = "https://pypi.org/simple" }
2986
+ dependencies = [
2987
+ { name = "opentelemetry-api" },
2988
+ { name = "opentelemetry-instrumentation" },
2989
+ { name = "opentelemetry-semantic-conventions" },
2990
+ { name = "opentelemetry-semantic-conventions-ai" },
2991
+ ]
2992
+ sdist = { url = "https://files.pythonhosted.org/packages/84/df/bbc1fff03deede021be8c13ab3f5ef34553aaed2c7a2444daab20fce1adc/opentelemetry_instrumentation_ollama-0.49.8.tar.gz", hash = "sha256:6cf1dd4644e6316e7baf647b8a85e4c640e483570967559454e008945100be9c", size = 8636, upload-time = "2025-12-11T20:32:52.613Z" }
2993
+ wheels = [
2994
+ { url = "https://files.pythonhosted.org/packages/a9/3e/53680ce24d0ca2591999a4c426a59d61a603eaa8757cc339ca535cc71e93/opentelemetry_instrumentation_ollama-0.49.8-py3-none-any.whl", hash = "sha256:0c76fe34970fb04ddb9b1530d53d7f8e33426e3e433b9a4090ea5ee2921ae6bf", size = 11268, upload-time = "2025-12-11T20:32:19.605Z" },
2995
+ ]
2996
+
2997
+ [[package]]
2998
+ name = "opentelemetry-instrumentation-openai"
2999
+ version = "0.49.8"
3000
+ source = { registry = "https://pypi.org/simple" }
3001
+ dependencies = [
3002
+ { name = "opentelemetry-api" },
3003
+ { name = "opentelemetry-instrumentation" },
3004
+ { name = "opentelemetry-semantic-conventions" },
3005
+ { name = "opentelemetry-semantic-conventions-ai" },
3006
+ ]
3007
+ sdist = { url = "https://files.pythonhosted.org/packages/44/03/a04b74790ae3c5ea80aa257fae07698a9111ad1c58714ef78eb40f070414/opentelemetry_instrumentation_openai-0.49.8.tar.gz", hash = "sha256:2efe4efea59f2708ef3fc470a10d6db11eb7c48328a2729383d9adef89b6b2da", size = 32254, upload-time = "2025-12-11T20:32:53.415Z" }
3008
+ wheels = [
3009
+ { url = "https://files.pythonhosted.org/packages/85/e7/36e0d15a1dfb94faf5fcc70721c6706ccbcf58323b31395b857884c0eb91/opentelemetry_instrumentation_openai-0.49.8-py3-none-any.whl", hash = "sha256:2555694d0f009b2d43776d718a7467229d49e04bb2ab78e2a9880d52674b8393", size = 43003, upload-time = "2025-12-11T20:32:20.844Z" },
3010
+ ]
3011
+
3012
+ [[package]]
3013
+ name = "opentelemetry-instrumentation-openai-agents"
3014
+ version = "0.49.8"
3015
+ source = { registry = "https://pypi.org/simple" }
3016
+ dependencies = [
3017
+ { name = "opentelemetry-api" },
3018
+ { name = "opentelemetry-instrumentation" },
3019
+ { name = "opentelemetry-semantic-conventions" },
3020
+ { name = "opentelemetry-semantic-conventions-ai" },
3021
+ ]
3022
+ sdist = { url = "https://files.pythonhosted.org/packages/b7/a4/6deba8deb7c26d89dce61bc0f26232935c5f1fd19141149f4f6fdb43d338/opentelemetry_instrumentation_openai_agents-0.49.8.tar.gz", hash = "sha256:388af4aff69013e302f2a701a723b04c75da32196deac61ff190c3ec7150223b", size = 8834, upload-time = "2025-12-11T20:32:54.232Z" }
3023
+ wheels = [
3024
+ { url = "https://files.pythonhosted.org/packages/92/c9/a1d5c5f7b6b80dd7ab29259ecc02dcf6e5c6bd3f431cb485f4fe38a6ff14/opentelemetry_instrumentation_openai_agents-0.49.8-py3-none-any.whl", hash = "sha256:ce5e31086a7bf7c528c3a09dcf99f68bcb429b707007466d2326487388686be8", size = 9848, upload-time = "2025-12-11T20:32:23.38Z" },
3025
+ ]
3026
+
3027
+ [[package]]
3028
+ name = "opentelemetry-instrumentation-pinecone"
3029
+ version = "0.49.8"
3030
+ source = { registry = "https://pypi.org/simple" }
3031
+ dependencies = [
3032
+ { name = "opentelemetry-api" },
3033
+ { name = "opentelemetry-instrumentation" },
3034
+ { name = "opentelemetry-semantic-conventions" },
3035
+ { name = "opentelemetry-semantic-conventions-ai" },
3036
+ ]
3037
+ sdist = { url = "https://files.pythonhosted.org/packages/c7/75/1ff41d2240adb76ab052b3fa4c6d85ec5426764ec23ecd0f163e6a2a416a/opentelemetry_instrumentation_pinecone-0.49.8.tar.gz", hash = "sha256:baebbf21aed37656ef28a6dfe283aabb6e02ae25b77e44cab46ee1cce32d09ee", size = 4483, upload-time = "2025-12-11T20:32:54.955Z" }
3038
+ wheels = [
3039
+ { url = "https://files.pythonhosted.org/packages/37/ca/02fa5d84f82fd5831e43a1cf45f102dd82166923b8f6cc2bf8835481268d/opentelemetry_instrumentation_pinecone-0.49.8-py3-none-any.whl", hash = "sha256:a7c4927b2382a741d7c4cf8341a8711baef7e9cfd4fc664283b6815213f32c97", size = 6359, upload-time = "2025-12-11T20:32:24.302Z" },
3040
+ ]
3041
+
3042
+ [[package]]
3043
+ name = "opentelemetry-instrumentation-qdrant"
3044
+ version = "0.49.8"
3045
+ source = { registry = "https://pypi.org/simple" }
3046
+ dependencies = [
3047
+ { name = "opentelemetry-api" },
3048
+ { name = "opentelemetry-instrumentation" },
3049
+ { name = "opentelemetry-semantic-conventions" },
3050
+ { name = "opentelemetry-semantic-conventions-ai" },
3051
+ ]
3052
+ sdist = { url = "https://files.pythonhosted.org/packages/f9/51/afb7e579d377a5192cd76132b813dff7cf8c388563d6952ef31706775db3/opentelemetry_instrumentation_qdrant-0.49.8.tar.gz", hash = "sha256:8ae23adf1d38bc53d870de0569343e85b690e23ea712794d4ff845680404cf61", size = 3815, upload-time = "2025-12-11T20:32:55.689Z" }
3053
+ wheels = [
3054
+ { url = "https://files.pythonhosted.org/packages/34/c6/fb384614d7e201f7cdfcaef3573a732da61f5f2549c13cf18d5364e50ef5/opentelemetry_instrumentation_qdrant-0.49.8-py3-none-any.whl", hash = "sha256:5a951cbbcba8a0b09d0400ee3a7750799b16860047178608f63977a83374acb5", size = 6302, upload-time = "2025-12-11T20:32:25.767Z" },
3055
+ ]
3056
+
3057
+ [[package]]
3058
+ name = "opentelemetry-instrumentation-redis"
3059
+ version = "0.59b0"
3060
+ source = { registry = "https://pypi.org/simple" }
3061
+ dependencies = [
3062
+ { name = "opentelemetry-api" },
3063
+ { name = "opentelemetry-instrumentation" },
3064
+ { name = "opentelemetry-semantic-conventions" },
3065
+ { name = "wrapt" },
3066
+ ]
3067
+ sdist = { url = "https://files.pythonhosted.org/packages/7f/f8/58bf83b10a97f67c7f06505bc4c4accbea7d961dec653a8c9e91fb65887e/opentelemetry_instrumentation_redis-0.59b0.tar.gz", hash = "sha256:d7f1c7c55ab57e10e0155c4c65d028a7e436aec7ccc7ccbf1d77e8cd12b55abd", size = 13922, upload-time = "2025-10-16T08:39:59.507Z" }
3068
+ wheels = [
3069
+ { url = "https://files.pythonhosted.org/packages/54/87/fef04827239ce84e2729b11611e8d5be7892288f620961ee9b9bafd035c5/opentelemetry_instrumentation_redis-0.59b0-py3-none-any.whl", hash = "sha256:8f7494dede5a6bfe5d8f20da67b371a502883398081856378380efef27da0bdf", size = 14946, upload-time = "2025-10-16T08:39:07.887Z" },
3070
+ ]
3071
+
3072
+ [[package]]
3073
+ name = "opentelemetry-instrumentation-replicate"
3074
+ version = "0.49.8"
3075
+ source = { registry = "https://pypi.org/simple" }
3076
+ dependencies = [
3077
+ { name = "opentelemetry-api" },
3078
+ { name = "opentelemetry-instrumentation" },
3079
+ { name = "opentelemetry-semantic-conventions" },
3080
+ { name = "opentelemetry-semantic-conventions-ai" },
3081
+ ]
3082
+ sdist = { url = "https://files.pythonhosted.org/packages/7b/5c/644e63c80f81581a3c40c5d78197a8c48e6b810981ac3d4485fce4e76697/opentelemetry_instrumentation_replicate-0.49.8.tar.gz", hash = "sha256:b38e63e65695e57a2431c83c91d0245a30064d80e21a64e8ebcaf5a6165bd2c6", size = 5346, upload-time = "2025-12-11T20:32:56.427Z" }
3083
+ wheels = [
3084
+ { url = "https://files.pythonhosted.org/packages/7c/cc/fe9c1593c1e0b5428e0d5b74d06bfec86e8babaa4aff77683f36e95bbc26/opentelemetry_instrumentation_replicate-0.49.8-py3-none-any.whl", hash = "sha256:34ef286f0da5e5d686a089ffb043796319d1e5a8d43a35661446fa15bed55e81", size = 8194, upload-time = "2025-12-11T20:32:28.07Z" },
3085
+ ]
3086
+
3087
+ [[package]]
3088
+ name = "opentelemetry-instrumentation-requests"
3089
+ version = "0.59b0"
3090
+ source = { registry = "https://pypi.org/simple" }
3091
+ dependencies = [
3092
+ { name = "opentelemetry-api" },
3093
+ { name = "opentelemetry-instrumentation" },
3094
+ { name = "opentelemetry-semantic-conventions" },
3095
+ { name = "opentelemetry-util-http" },
3096
+ ]
3097
+ sdist = { url = "https://files.pythonhosted.org/packages/49/01/31282a46b09684dfc636bc066deb090bae6973e71e85e253a8c74e727b1f/opentelemetry_instrumentation_requests-0.59b0.tar.gz", hash = "sha256:9af2ffe3317f03074d7f865919139e89170b6763a0251b68c25e8e64e04b3400", size = 15186, upload-time = "2025-10-16T08:40:00.558Z" }
3098
+ wheels = [
3099
+ { url = "https://files.pythonhosted.org/packages/e5/ea/c282ba418b2669e4f730cb3f68b02a0ca65f4baf801e971169a4cc449ffb/opentelemetry_instrumentation_requests-0.59b0-py3-none-any.whl", hash = "sha256:d43121532877e31a46c48649279cec2504ee1e0ceb3c87b80fe5ccd7eafc14c1", size = 12966, upload-time = "2025-10-16T08:39:09.919Z" },
3100
+ ]
3101
+
3102
+ [[package]]
3103
+ name = "opentelemetry-instrumentation-sagemaker"
3104
+ version = "0.49.8"
3105
+ source = { registry = "https://pypi.org/simple" }
3106
+ dependencies = [
3107
+ { name = "opentelemetry-api" },
3108
+ { name = "opentelemetry-instrumentation" },
3109
+ { name = "opentelemetry-semantic-conventions" },
3110
+ { name = "opentelemetry-semantic-conventions-ai" },
3111
+ ]
3112
+ sdist = { url = "https://files.pythonhosted.org/packages/82/9b/c9a7dd48d756127e62996c521fa4342370d4fe8838534ff44408e2a18e2c/opentelemetry_instrumentation_sagemaker-0.49.8.tar.gz", hash = "sha256:1a60d3d592765ed8ae5ef15ee380a41ddc8638e9322fb4e258908814f5cd6a18", size = 6878, upload-time = "2025-12-11T20:32:57.181Z" }
3113
+ wheels = [
3114
+ { url = "https://files.pythonhosted.org/packages/f8/6e/79e98c779bd724bc553c09b553077fd17252159b545ad71166b0c0a329fc/opentelemetry_instrumentation_sagemaker-0.49.8-py3-none-any.whl", hash = "sha256:7b1a47a6d25c48b7e213f23a072455a982ffe2c40dafc151abc71fded54b6e0f", size = 9809, upload-time = "2025-12-11T20:32:29.021Z" },
3115
+ ]
3116
+
3117
+ [[package]]
3118
+ name = "opentelemetry-instrumentation-sqlalchemy"
3119
+ version = "0.59b0"
3120
+ source = { registry = "https://pypi.org/simple" }
3121
+ dependencies = [
3122
+ { name = "opentelemetry-api" },
3123
+ { name = "opentelemetry-instrumentation" },
3124
+ { name = "opentelemetry-semantic-conventions" },
3125
+ { name = "packaging" },
3126
+ { name = "wrapt" },
3127
+ ]
3128
+ sdist = { url = "https://files.pythonhosted.org/packages/b9/00/c5222a5e0521772aa530008c6c9c67f453e2b00e97d91fd799e8159aecf5/opentelemetry_instrumentation_sqlalchemy-0.59b0.tar.gz", hash = "sha256:7647b1e63497deebd41f9525c414699e0d49f19efcadc8a0642b715897f62d32", size = 14993, upload-time = "2025-10-16T08:40:01.105Z" }
3129
+ wheels = [
3130
+ { url = "https://files.pythonhosted.org/packages/2d/a9/55d75a3d46c635a48cf3ad3b2599bad1d4ae47eeb1979b19ca47df47dc8c/opentelemetry_instrumentation_sqlalchemy-0.59b0-py3-none-any.whl", hash = "sha256:4ef150c49b6d1a8a7328f9d23ff40c285a245b88b0875ed2e5d277a40aa921c8", size = 14211, upload-time = "2025-10-16T08:39:10.714Z" },
3131
+ ]
3132
+
3133
+ [[package]]
3134
+ name = "opentelemetry-instrumentation-threading"
3135
+ version = "0.59b0"
3136
+ source = { registry = "https://pypi.org/simple" }
3137
+ dependencies = [
3138
+ { name = "opentelemetry-api" },
3139
+ { name = "opentelemetry-instrumentation" },
3140
+ { name = "wrapt" },
3141
+ ]
3142
+ sdist = { url = "https://files.pythonhosted.org/packages/82/7a/84e97d8992808197006e607ae410c2219bdbbc23d1289ba0c244d3220741/opentelemetry_instrumentation_threading-0.59b0.tar.gz", hash = "sha256:ce5658730b697dcbc0e0d6d13643a69fd8aeb1b32fa8db3bade8ce114c7975f3", size = 8770, upload-time = "2025-10-16T08:40:03.587Z" }
3143
+ wheels = [
3144
+ { url = "https://files.pythonhosted.org/packages/b8/50/32d29076aaa1c91983cdd3ca8c6bb4d344830cd7d87a7c0fdc2d98c58509/opentelemetry_instrumentation_threading-0.59b0-py3-none-any.whl", hash = "sha256:76da2fc01fe1dccebff6581080cff9e42ac7b27cc61eb563f3c4435c727e8eca", size = 9313, upload-time = "2025-10-16T08:39:15.876Z" },
3145
+ ]
3146
+
3147
+ [[package]]
3148
+ name = "opentelemetry-instrumentation-together"
3149
+ version = "0.49.8"
3150
+ source = { registry = "https://pypi.org/simple" }
3151
+ dependencies = [
3152
+ { name = "opentelemetry-api" },
3153
+ { name = "opentelemetry-instrumentation" },
3154
+ { name = "opentelemetry-semantic-conventions" },
3155
+ { name = "opentelemetry-semantic-conventions-ai" },
3156
+ ]
3157
+ sdist = { url = "https://files.pythonhosted.org/packages/cc/5a/b99cfe5e8cd2bc8b2c445377e4bf4270e45a5e0ede07f64f9739e075bd84/opentelemetry_instrumentation_together-0.49.8.tar.gz", hash = "sha256:7147e2a66ccc07bf07465f512fb562e555080a630c7717ff8cff988f5ab20388", size = 5675, upload-time = "2025-12-11T20:32:58.307Z" }
3158
+ wheels = [
3159
+ { url = "https://files.pythonhosted.org/packages/47/f2/50568c78032bddf883c5ca6bbe9f27628349685080cd89edff2aaa0a38c6/opentelemetry_instrumentation_together-0.49.8-py3-none-any.whl", hash = "sha256:5bd4d01b8ceb1ae85e936fd7bfba7ab540fd979e7fdbc23f3d6788fb5b46bf72", size = 8704, upload-time = "2025-12-11T20:32:29.985Z" },
3160
+ ]
3161
+
3162
+ [[package]]
3163
+ name = "opentelemetry-instrumentation-transformers"
3164
+ version = "0.49.8"
3165
+ source = { registry = "https://pypi.org/simple" }
3166
+ dependencies = [
3167
+ { name = "opentelemetry-api" },
3168
+ { name = "opentelemetry-instrumentation" },
3169
+ { name = "opentelemetry-semantic-conventions" },
3170
+ { name = "opentelemetry-semantic-conventions-ai" },
3171
+ ]
3172
+ sdist = { url = "https://files.pythonhosted.org/packages/3a/a7/a4394ce9b42aa4d819a59fe7ee7ccaa1733f4fcb40430c6c540b2f981cff/opentelemetry_instrumentation_transformers-0.49.8.tar.gz", hash = "sha256:7ddbba8082a35aaabaf6dbcb3e5fa5a358acc042ef87401f834b1d4233d665a2", size = 5889, upload-time = "2025-12-11T20:32:59.378Z" }
3173
+ wheels = [
3174
+ { url = "https://files.pythonhosted.org/packages/d3/d8/ea758d373217bc5a5f10b4cc0ba9be9cfed4e4cb69343518d3ee5984203a/opentelemetry_instrumentation_transformers-0.49.8-py3-none-any.whl", hash = "sha256:8c29b91ddf6ac7fa58bf2d3923cc9c7698f24adfd917b545849459d2f88ae9f1", size = 8283, upload-time = "2025-12-11T20:32:31.522Z" },
3175
+ ]
3176
+
3177
+ [[package]]
3178
+ name = "opentelemetry-instrumentation-urllib3"
3179
+ version = "0.59b0"
3180
+ source = { registry = "https://pypi.org/simple" }
3181
+ dependencies = [
3182
+ { name = "opentelemetry-api" },
3183
+ { name = "opentelemetry-instrumentation" },
3184
+ { name = "opentelemetry-semantic-conventions" },
3185
+ { name = "opentelemetry-util-http" },
3186
+ { name = "wrapt" },
3187
+ ]
3188
+ sdist = { url = "https://files.pythonhosted.org/packages/94/53/ff93665911808933b1af6fbbb1be2eb83c0c46e3b5f24b0b04c094b5b719/opentelemetry_instrumentation_urllib3-0.59b0.tar.gz", hash = "sha256:2de8d53a746bba043be1bc8f3246e1b131ebb6e94fe73601edd8b2bd91fe35b8", size = 15788, upload-time = "2025-10-16T08:40:05.889Z" }
3189
+ wheels = [
3190
+ { url = "https://files.pythonhosted.org/packages/83/3d/673cbea7aafb93a4613abf3d9c920d7c65a8cad79c910719dc286169bac8/opentelemetry_instrumentation_urllib3-0.59b0-py3-none-any.whl", hash = "sha256:a68c363092cf5db8c67c5778dbb2e4a14554e77baf7d276c374ea75ec926e148", size = 13187, upload-time = "2025-10-16T08:39:20.727Z" },
3191
+ ]
3192
+
3193
+ [[package]]
3194
+ name = "opentelemetry-instrumentation-vertexai"
3195
+ version = "0.49.8"
3196
+ source = { registry = "https://pypi.org/simple" }
3197
+ dependencies = [
3198
+ { name = "opentelemetry-api" },
3199
+ { name = "opentelemetry-instrumentation" },
3200
+ { name = "opentelemetry-semantic-conventions" },
3201
+ { name = "opentelemetry-semantic-conventions-ai" },
3202
+ ]
3203
+ sdist = { url = "https://files.pythonhosted.org/packages/ef/b0/0fa7b85362fa4c8640f0946abdba89c364273fe317fd26c8ce8e8ad38ecc/opentelemetry_instrumentation_vertexai-0.49.8.tar.gz", hash = "sha256:41a1cc41937c6d84b7b595debc86789f151d89aad1334a46a13c66b8e6e36b22", size = 8372, upload-time = "2025-12-11T20:33:00.2Z" }
3204
+ wheels = [
3205
+ { url = "https://files.pythonhosted.org/packages/ee/b1/278131d38927b943e06926ec3a15f35cbbedcfdedb0f7c23af40d85aab9a/opentelemetry_instrumentation_vertexai-0.49.8-py3-none-any.whl", hash = "sha256:b208531f68feed0ceccd5dd39b572e5ca208a4c0044d97a22fe0577a06cc0c0e", size = 10822, upload-time = "2025-12-11T20:32:32.454Z" },
3206
+ ]
3207
+
3208
+ [[package]]
3209
+ name = "opentelemetry-instrumentation-watsonx"
3210
+ version = "0.49.8"
3211
+ source = { registry = "https://pypi.org/simple" }
3212
+ dependencies = [
3213
+ { name = "opentelemetry-api" },
3214
+ { name = "opentelemetry-instrumentation" },
3215
+ { name = "opentelemetry-semantic-conventions" },
3216
+ { name = "opentelemetry-semantic-conventions-ai" },
3217
+ ]
3218
+ sdist = { url = "https://files.pythonhosted.org/packages/6c/93/0e9e43a9d3297044eee0d8c359d2783bd13bb5eeec3cfeef9580fe8246a9/opentelemetry_instrumentation_watsonx-0.49.8.tar.gz", hash = "sha256:ad5334555ff481eea7ae13ba583a8d017c350b6dc3f9ad5062ef99e32d4e48c6", size = 8371, upload-time = "2025-12-11T20:33:00.963Z" }
3219
+ wheels = [
3220
+ { url = "https://files.pythonhosted.org/packages/06/31/372c25d4de91133ab0c14826008a1027c9ccf3c6403ac8e78a86796bfba5/opentelemetry_instrumentation_watsonx-0.49.8-py3-none-any.whl", hash = "sha256:5e92519b359c37a7fd77b6a35dfa18e4c68f74063e7d72ea30c473544276fd25", size = 10273, upload-time = "2025-12-11T20:32:33.872Z" },
3221
+ ]
3222
+
3223
+ [[package]]
3224
+ name = "opentelemetry-instrumentation-weaviate"
3225
+ version = "0.49.8"
3226
+ source = { registry = "https://pypi.org/simple" }
3227
+ dependencies = [
3228
+ { name = "opentelemetry-api" },
3229
+ { name = "opentelemetry-instrumentation" },
3230
+ { name = "opentelemetry-semantic-conventions" },
3231
+ { name = "opentelemetry-semantic-conventions-ai" },
3232
+ ]
3233
+ sdist = { url = "https://files.pythonhosted.org/packages/c2/2c/5d4fa7eeddfbbe1939e68905f76591c31f46ed84886a96a3e122c1cf4a61/opentelemetry_instrumentation_weaviate-0.49.8.tar.gz", hash = "sha256:a46d3f5bb3bf023f86831aae7bee03a4d73770c190be9b40a04b92f5e195adf0", size = 4431, upload-time = "2025-12-11T20:33:01.693Z" }
3234
+ wheels = [
3235
+ { url = "https://files.pythonhosted.org/packages/dc/26/44e91020898d5768865b0a06c75ad0d1c3351573a61dbf15f795926b2b16/opentelemetry_instrumentation_weaviate-0.49.8-py3-none-any.whl", hash = "sha256:9c2469dc96e0b3bcc9caecf9147d7fb844d022151ea673ff60a8c024599f3da6", size = 6409, upload-time = "2025-12-11T20:32:35.453Z" },
3236
+ ]
3237
+
3238
+ [[package]]
3239
+ name = "opentelemetry-instrumentation-writer"
3240
+ version = "0.49.8"
3241
+ source = { registry = "https://pypi.org/simple" }
3242
+ dependencies = [
3243
+ { name = "opentelemetry-api" },
3244
+ { name = "opentelemetry-instrumentation" },
3245
+ { name = "opentelemetry-semantic-conventions" },
3246
+ { name = "opentelemetry-semantic-conventions-ai" },
3247
+ ]
3248
+ sdist = { url = "https://files.pythonhosted.org/packages/c4/d6/53b9ee674baa78dbf546d9303b9fee4ee86b1aeef269d3547ef0e7582fb5/opentelemetry_instrumentation_writer-0.49.8.tar.gz", hash = "sha256:108992f6bc2503029d452641aa23466bdafcb1f32d30c243bf7c097d65d6e7ab", size = 8968, upload-time = "2025-12-11T20:33:02.519Z" }
3249
+ wheels = [
3250
+ { url = "https://files.pythonhosted.org/packages/69/c5/10fd1a5013851d9800c67e509b372f64c74cfc4c09c4dce5418d132ca8dd/opentelemetry_instrumentation_writer-0.49.8-py3-none-any.whl", hash = "sha256:3afc244114088590728d68e9419f9067088cef89fad320ae38ab598e0a2bcb68", size = 11554, upload-time = "2025-12-11T20:32:36.389Z" },
3251
+ ]
3252
+
3253
+ [[package]]
3254
+ name = "opentelemetry-proto"
3255
+ version = "1.38.0"
3256
+ source = { registry = "https://pypi.org/simple" }
3257
+ dependencies = [
3258
+ { name = "protobuf" },
3259
+ ]
3260
+ sdist = { url = "https://files.pythonhosted.org/packages/51/14/f0c4f0f6371b9cb7f9fa9ee8918bfd59ac7040c7791f1e6da32a1839780d/opentelemetry_proto-1.38.0.tar.gz", hash = "sha256:88b161e89d9d372ce723da289b7da74c3a8354a8e5359992be813942969ed468", size = 46152, upload-time = "2025-10-16T08:36:01.612Z" }
3261
+ wheels = [
3262
+ { url = "https://files.pythonhosted.org/packages/b6/6a/82b68b14efca5150b2632f3692d627afa76b77378c4999f2648979409528/opentelemetry_proto-1.38.0-py3-none-any.whl", hash = "sha256:b6ebe54d3217c42e45462e2a1ae28c3e2bf2ec5a5645236a490f55f45f1a0a18", size = 72535, upload-time = "2025-10-16T08:35:45.749Z" },
3263
+ ]
3264
+
3265
+ [[package]]
3266
+ name = "opentelemetry-sdk"
3267
+ version = "1.38.0"
3268
+ source = { registry = "https://pypi.org/simple" }
3269
+ dependencies = [
3270
+ { name = "opentelemetry-api" },
3271
+ { name = "opentelemetry-semantic-conventions" },
3272
+ { name = "typing-extensions" },
3273
+ ]
3274
+ sdist = { url = "https://files.pythonhosted.org/packages/85/cb/f0eee1445161faf4c9af3ba7b848cc22a50a3d3e2515051ad8628c35ff80/opentelemetry_sdk-1.38.0.tar.gz", hash = "sha256:93df5d4d871ed09cb4272305be4d996236eedb232253e3ab864c8620f051cebe", size = 171942, upload-time = "2025-10-16T08:36:02.257Z" }
3275
+ wheels = [
3276
+ { url = "https://files.pythonhosted.org/packages/2f/2e/e93777a95d7d9c40d270a371392b6d6f1ff170c2a3cb32d6176741b5b723/opentelemetry_sdk-1.38.0-py3-none-any.whl", hash = "sha256:1c66af6564ecc1553d72d811a01df063ff097cdc82ce188da9951f93b8d10f6b", size = 132349, upload-time = "2025-10-16T08:35:46.995Z" },
3277
+ ]
3278
+
3279
+ [[package]]
3280
+ name = "opentelemetry-semantic-conventions"
3281
+ version = "0.59b0"
3282
+ source = { registry = "https://pypi.org/simple" }
3283
+ dependencies = [
3284
+ { name = "opentelemetry-api" },
3285
+ { name = "typing-extensions" },
3286
+ ]
3287
+ sdist = { url = "https://files.pythonhosted.org/packages/40/bc/8b9ad3802cd8ac6583a4eb7de7e5d7db004e89cb7efe7008f9c8a537ee75/opentelemetry_semantic_conventions-0.59b0.tar.gz", hash = "sha256:7a6db3f30d70202d5bf9fa4b69bc866ca6a30437287de6c510fb594878aed6b0", size = 129861, upload-time = "2025-10-16T08:36:03.346Z" }
3288
+ wheels = [
3289
+ { url = "https://files.pythonhosted.org/packages/24/7d/c88d7b15ba8fe5c6b8f93be50fc11795e9fc05386c44afaf6b76fe191f9b/opentelemetry_semantic_conventions-0.59b0-py3-none-any.whl", hash = "sha256:35d3b8833ef97d614136e253c1da9342b4c3c083bbaf29ce31d572a1c3825eed", size = 207954, upload-time = "2025-10-16T08:35:48.054Z" },
3290
+ ]
3291
+
3292
+ [[package]]
3293
+ name = "opentelemetry-semantic-conventions-ai"
3294
+ version = "0.4.13"
3295
+ source = { registry = "https://pypi.org/simple" }
3296
+ sdist = { url = "https://files.pythonhosted.org/packages/ba/e6/40b59eda51ac47009fb47afcdf37c6938594a0bd7f3b9fadcbc6058248e3/opentelemetry_semantic_conventions_ai-0.4.13.tar.gz", hash = "sha256:94efa9fb4ffac18c45f54a3a338ffeb7eedb7e1bb4d147786e77202e159f0036", size = 5368, upload-time = "2025-08-22T10:14:17.387Z" }
3297
+ wheels = [
3298
+ { url = "https://files.pythonhosted.org/packages/35/b5/cf25da2218910f0d6cdf7f876a06bed118c4969eacaf60a887cbaef44f44/opentelemetry_semantic_conventions_ai-0.4.13-py3-none-any.whl", hash = "sha256:883a30a6bb5deaec0d646912b5f9f6dcbb9f6f72557b73d0f2560bf25d13e2d5", size = 6080, upload-time = "2025-08-22T10:14:16.477Z" },
3299
+ ]
3300
+
3301
+ [[package]]
3302
+ name = "opentelemetry-util-http"
3303
+ version = "0.59b0"
3304
+ source = { registry = "https://pypi.org/simple" }
3305
+ sdist = { url = "https://files.pythonhosted.org/packages/34/f7/13cd081e7851c42520ab0e96efb17ffbd901111a50b8252ec1e240664020/opentelemetry_util_http-0.59b0.tar.gz", hash = "sha256:ae66ee91be31938d832f3b4bc4eb8a911f6eddd38969c4a871b1230db2a0a560", size = 9412, upload-time = "2025-10-16T08:40:11.335Z" }
3306
+ wheels = [
3307
+ { url = "https://files.pythonhosted.org/packages/20/56/62282d1d4482061360449dacc990c89cad0fc810a2ed937b636300f55023/opentelemetry_util_http-0.59b0-py3-none-any.whl", hash = "sha256:6d036a07563bce87bf521839c0671b507a02a0d39d7ea61b88efa14c6e25355d", size = 7648, upload-time = "2025-10-16T08:39:25.706Z" },
3308
+ ]
3309
+
3310
+ [[package]]
3311
+ name = "orjson"
3312
+ version = "3.11.4"
3313
+ source = { registry = "https://pypi.org/simple" }
3314
+ sdist = { url = "https://files.pythonhosted.org/packages/c6/fe/ed708782d6709cc60eb4c2d8a361a440661f74134675c72990f2c48c785f/orjson-3.11.4.tar.gz", hash = "sha256:39485f4ab4c9b30a3943cfe99e1a213c4776fb69e8abd68f66b83d5a0b0fdc6d", size = 5945188, upload-time = "2025-10-24T15:50:38.027Z" }
3315
+ wheels = [
3316
+ { url = "https://files.pythonhosted.org/packages/63/51/6b556192a04595b93e277a9ff71cd0cc06c21a7df98bcce5963fa0f5e36f/orjson-3.11.4-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:d4371de39319d05d3f482f372720b841c841b52f5385bd99c61ed69d55d9ab50", size = 243571, upload-time = "2025-10-24T15:49:10.008Z" },
3317
+ { url = "https://files.pythonhosted.org/packages/1c/2c/2602392ddf2601d538ff11848b98621cd465d1a1ceb9db9e8043181f2f7b/orjson-3.11.4-cp312-cp312-macosx_15_0_arm64.whl", hash = "sha256:e41fd3b3cac850eaae78232f37325ed7d7436e11c471246b87b2cd294ec94853", size = 128891, upload-time = "2025-10-24T15:49:11.297Z" },
3318
+ { url = "https://files.pythonhosted.org/packages/4e/47/bf85dcf95f7a3a12bf223394a4f849430acd82633848d52def09fa3f46ad/orjson-3.11.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:600e0e9ca042878c7fdf189cf1b028fe2c1418cc9195f6cb9824eb6ed99cb938", size = 130137, upload-time = "2025-10-24T15:49:12.544Z" },
3319
+ { url = "https://files.pythonhosted.org/packages/b4/4d/a0cb31007f3ab6f1fd2a1b17057c7c349bc2baf8921a85c0180cc7be8011/orjson-3.11.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7bbf9b333f1568ef5da42bc96e18bf30fd7f8d54e9ae066d711056add508e415", size = 129152, upload-time = "2025-10-24T15:49:13.754Z" },
3320
+ { url = "https://files.pythonhosted.org/packages/f7/ef/2811def7ce3d8576b19e3929fff8f8f0d44bc5eb2e0fdecb2e6e6cc6c720/orjson-3.11.4-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4806363144bb6e7297b8e95870e78d30a649fdc4e23fc84daa80c8ebd366ce44", size = 136834, upload-time = "2025-10-24T15:49:15.307Z" },
3321
+ { url = "https://files.pythonhosted.org/packages/00/d4/9aee9e54f1809cec8ed5abd9bc31e8a9631d19460e3b8470145d25140106/orjson-3.11.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad355e8308493f527d41154e9053b86a5be892b3b359a5c6d5d95cda23601cb2", size = 137519, upload-time = "2025-10-24T15:49:16.557Z" },
3322
+ { url = "https://files.pythonhosted.org/packages/db/ea/67bfdb5465d5679e8ae8d68c11753aaf4f47e3e7264bad66dc2f2249e643/orjson-3.11.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c8a7517482667fb9f0ff1b2f16fe5829296ed7a655d04d68cd9711a4d8a4e708", size = 136749, upload-time = "2025-10-24T15:49:17.796Z" },
3323
+ { url = "https://files.pythonhosted.org/packages/01/7e/62517dddcfce6d53a39543cd74d0dccfcbdf53967017c58af68822100272/orjson-3.11.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97eb5942c7395a171cbfecc4ef6701fc3c403e762194683772df4c54cfbb2210", size = 136325, upload-time = "2025-10-24T15:49:19.347Z" },
3324
+ { url = "https://files.pythonhosted.org/packages/18/ae/40516739f99ab4c7ec3aaa5cc242d341fcb03a45d89edeeaabc5f69cb2cf/orjson-3.11.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:149d95d5e018bdd822e3f38c103b1a7c91f88d38a88aada5c4e9b3a73a244241", size = 140204, upload-time = "2025-10-24T15:49:20.545Z" },
3325
+ { url = "https://files.pythonhosted.org/packages/82/18/ff5734365623a8916e3a4037fcef1cd1782bfc14cf0992afe7940c5320bf/orjson-3.11.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:624f3951181eb46fc47dea3d221554e98784c823e7069edb5dbd0dc826ac909b", size = 406242, upload-time = "2025-10-24T15:49:21.884Z" },
3326
+ { url = "https://files.pythonhosted.org/packages/e1/43/96436041f0a0c8c8deca6a05ebeaf529bf1de04839f93ac5e7c479807aec/orjson-3.11.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:03bfa548cf35e3f8b3a96c4e8e41f753c686ff3d8e182ce275b1751deddab58c", size = 150013, upload-time = "2025-10-24T15:49:23.185Z" },
3327
+ { url = "https://files.pythonhosted.org/packages/1b/48/78302d98423ed8780479a1e682b9aecb869e8404545d999d34fa486e573e/orjson-3.11.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:525021896afef44a68148f6ed8a8bf8375553d6066c7f48537657f64823565b9", size = 139951, upload-time = "2025-10-24T15:49:24.428Z" },
3328
+ { url = "https://files.pythonhosted.org/packages/4a/7b/ad613fdcdaa812f075ec0875143c3d37f8654457d2af17703905425981bf/orjson-3.11.4-cp312-cp312-win32.whl", hash = "sha256:b58430396687ce0f7d9eeb3dd47761ca7d8fda8e9eb92b3077a7a353a75efefa", size = 136049, upload-time = "2025-10-24T15:49:25.973Z" },
3329
+ { url = "https://files.pythonhosted.org/packages/b9/3c/9cf47c3ff5f39b8350fb21ba65d789b6a1129d4cbb3033ba36c8a9023520/orjson-3.11.4-cp312-cp312-win_amd64.whl", hash = "sha256:c6dbf422894e1e3c80a177133c0dda260f81428f9de16d61041949f6a2e5c140", size = 131461, upload-time = "2025-10-24T15:49:27.259Z" },
3330
+ { url = "https://files.pythonhosted.org/packages/c6/3b/e2425f61e5825dc5b08c2a5a2b3af387eaaca22a12b9c8c01504f8614c36/orjson-3.11.4-cp312-cp312-win_arm64.whl", hash = "sha256:d38d2bc06d6415852224fcc9c0bfa834c25431e466dc319f0edd56cca81aa96e", size = 126167, upload-time = "2025-10-24T15:49:28.511Z" },
3331
+ ]
3332
+
3333
+ [[package]]
3334
+ name = "ormsgpack"
3335
+ version = "1.12.0"
3336
+ source = { registry = "https://pypi.org/simple" }
3337
+ sdist = { url = "https://files.pythonhosted.org/packages/6c/67/d5ef41c3b4a94400be801984ef7c7fc9623e1a82b643e74eeec367e7462b/ormsgpack-1.12.0.tar.gz", hash = "sha256:94be818fdbb0285945839b88763b269987787cb2f7ef280cad5d6ec815b7e608", size = 49959, upload-time = "2025-11-04T18:30:10.083Z" }
3338
+ wheels = [
3339
+ { url = "https://files.pythonhosted.org/packages/a2/f2/c1036b2775fcc0cfa5fd618c53bcd3b862ee07298fb627f03af4c7982f84/ormsgpack-1.12.0-cp312-cp312-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:e0c1e08b64d99076fee155276097489b82cc56e8d5951c03c721a65a32f44494", size = 369538, upload-time = "2025-11-04T18:29:37.125Z" },
3340
+ { url = "https://files.pythonhosted.org/packages/d9/ca/526c4ae02f3cb34621af91bf8282a10d666757c2e0c6ff391ff5d403d607/ormsgpack-1.12.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3fd43bcb299131690b8e0677af172020b2ada8e625169034b42ac0c13adf84aa", size = 195872, upload-time = "2025-11-04T18:29:38.34Z" },
3341
+ { url = "https://files.pythonhosted.org/packages/7f/0f/83bb7968e9715f6a85be53d041b1e6324a05428f56b8b980dac866886871/ormsgpack-1.12.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f0149d595341e22ead340bf281b2995c4cc7dc8d522a6b5f575fe17aa407604", size = 206469, upload-time = "2025-11-04T18:29:39.749Z" },
3342
+ { url = "https://files.pythonhosted.org/packages/02/e3/9e93ca1065f2d4af035804a842b1ff3025bab580c7918239bb225cd1fee2/ormsgpack-1.12.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f19a1b27d169deb553c80fd10b589fc2be1fc14cee779fae79fcaf40db04de2b", size = 208273, upload-time = "2025-11-04T18:29:40.769Z" },
3343
+ { url = "https://files.pythonhosted.org/packages/b3/d8/6d6ef901b3a8b8f3ab8836b135a56eb7f66c559003e251d9530bedb12627/ormsgpack-1.12.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6f28896942d655064940dfe06118b7ce1e3468d051483148bf02c99ec157483a", size = 377839, upload-time = "2025-11-04T18:29:42.092Z" },
3344
+ { url = "https://files.pythonhosted.org/packages/4c/72/fcb704bfa4c2c3a37b647d597cc45a13cffc9d50baac635a9ad620731d29/ormsgpack-1.12.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:9396efcfa48b4abbc06e44c5dbc3c4574a8381a80cb4cd01eea15d28b38c554e", size = 471446, upload-time = "2025-11-04T18:29:43.133Z" },
3345
+ { url = "https://files.pythonhosted.org/packages/84/f8/402e4e3eb997c2ee534c99bec4b5bb359c2a1f9edadf043e254a71e11378/ormsgpack-1.12.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:96586ed537a5fb386a162c4f9f7d8e6f76e07b38a990d50c73f11131e00ff040", size = 381783, upload-time = "2025-11-04T18:29:44.466Z" },
3346
+ { url = "https://files.pythonhosted.org/packages/f0/8d/5897b700360bc00911b70ae5ef1134ee7abf5baa81a92a4be005917d3dfd/ormsgpack-1.12.0-cp312-cp312-win_amd64.whl", hash = "sha256:e70387112fb3870e4844de090014212cdcf1342f5022047aecca01ec7de05d7a", size = 112943, upload-time = "2025-11-04T18:29:45.468Z" },
3347
+ { url = "https://files.pythonhosted.org/packages/5b/44/1e73649f79bb96d6cf9e5bcbac68b6216d238bba80af351c4c0cbcf7ee15/ormsgpack-1.12.0-cp312-cp312-win_arm64.whl", hash = "sha256:d71290a23de5d4829610c42665d816c661ecad8979883f3f06b2e3ab9639962e", size = 106688, upload-time = "2025-11-04T18:29:46.411Z" },
3348
+ ]
3349
+
3350
+ [[package]]
3351
+ name = "overrides"
3352
+ version = "7.7.0"
3353
+ source = { registry = "https://pypi.org/simple" }
3354
+ sdist = { url = "https://files.pythonhosted.org/packages/36/86/b585f53236dec60aba864e050778b25045f857e17f6e5ea0ae95fe80edd2/overrides-7.7.0.tar.gz", hash = "sha256:55158fa3d93b98cc75299b1e67078ad9003ca27945c76162c1c0766d6f91820a", size = 22812, upload-time = "2024-01-27T21:01:33.423Z" }
3355
+ wheels = [
3356
+ { url = "https://files.pythonhosted.org/packages/2c/ab/fc8290c6a4c722e5514d80f62b2dc4c4df1a68a41d1364e625c35990fcf3/overrides-7.7.0-py3-none-any.whl", hash = "sha256:c7ed9d062f78b8e4c1a7b70bd8796b35ead4d9f510227ef9c5dc7626c60d7e49", size = 17832, upload-time = "2024-01-27T21:01:31.393Z" },
3357
+ ]
3358
+
3359
+ [[package]]
3360
+ name = "packaging"
3361
+ version = "25.0"
3362
+ source = { registry = "https://pypi.org/simple" }
3363
+ sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727, upload-time = "2025-04-19T11:48:59.673Z" }
3364
+ wheels = [
3365
+ { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" },
3366
+ ]
3367
+
3368
+ [[package]]
3369
+ name = "pandas"
3370
+ version = "2.3.3"
3371
+ source = { registry = "https://pypi.org/simple" }
3372
+ dependencies = [
3373
+ { name = "numpy" },
3374
+ { name = "python-dateutil" },
3375
+ { name = "pytz" },
3376
+ { name = "tzdata" },
3377
+ ]
3378
+ sdist = { url = "https://files.pythonhosted.org/packages/33/01/d40b85317f86cf08d853a4f495195c73815fdf205eef3993821720274518/pandas-2.3.3.tar.gz", hash = "sha256:e05e1af93b977f7eafa636d043f9f94c7ee3ac81af99c13508215942e64c993b", size = 4495223, upload-time = "2025-09-29T23:34:51.853Z" }
3379
+ wheels = [
3380
+ { url = "https://files.pythonhosted.org/packages/9c/fb/231d89e8637c808b997d172b18e9d4a4bc7bf31296196c260526055d1ea0/pandas-2.3.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d21f6d74eb1725c2efaa71a2bfc661a0689579b58e9c0ca58a739ff0b002b53", size = 11597846, upload-time = "2025-09-29T23:19:48.856Z" },
3381
+ { url = "https://files.pythonhosted.org/packages/5c/bd/bf8064d9cfa214294356c2d6702b716d3cf3bb24be59287a6a21e24cae6b/pandas-2.3.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3fd2f887589c7aa868e02632612ba39acb0b8948faf5cc58f0850e165bd46f35", size = 10729618, upload-time = "2025-09-29T23:39:08.659Z" },
3382
+ { url = "https://files.pythonhosted.org/packages/57/56/cf2dbe1a3f5271370669475ead12ce77c61726ffd19a35546e31aa8edf4e/pandas-2.3.3-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ecaf1e12bdc03c86ad4a7ea848d66c685cb6851d807a26aa245ca3d2017a1908", size = 11737212, upload-time = "2025-09-29T23:19:59.765Z" },
3383
+ { url = "https://files.pythonhosted.org/packages/e5/63/cd7d615331b328e287d8233ba9fdf191a9c2d11b6af0c7a59cfcec23de68/pandas-2.3.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b3d11d2fda7eb164ef27ffc14b4fcab16a80e1ce67e9f57e19ec0afaf715ba89", size = 12362693, upload-time = "2025-09-29T23:20:14.098Z" },
3384
+ { url = "https://files.pythonhosted.org/packages/a6/de/8b1895b107277d52f2b42d3a6806e69cfef0d5cf1d0ba343470b9d8e0a04/pandas-2.3.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a68e15f780eddf2b07d242e17a04aa187a7ee12b40b930bfdd78070556550e98", size = 12771002, upload-time = "2025-09-29T23:20:26.76Z" },
3385
+ { url = "https://files.pythonhosted.org/packages/87/21/84072af3187a677c5893b170ba2c8fbe450a6ff911234916da889b698220/pandas-2.3.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:371a4ab48e950033bcf52b6527eccb564f52dc826c02afd9a1bc0ab731bba084", size = 13450971, upload-time = "2025-09-29T23:20:41.344Z" },
3386
+ { url = "https://files.pythonhosted.org/packages/86/41/585a168330ff063014880a80d744219dbf1dd7a1c706e75ab3425a987384/pandas-2.3.3-cp312-cp312-win_amd64.whl", hash = "sha256:a16dcec078a01eeef8ee61bf64074b4e524a2a3f4b3be9326420cabe59c4778b", size = 10992722, upload-time = "2025-09-29T23:20:54.139Z" },
3387
+ ]
3388
+
3389
+ [[package]]
3390
+ name = "parso"
3391
+ version = "0.8.5"
3392
+ source = { registry = "https://pypi.org/simple" }
3393
+ sdist = { url = "https://files.pythonhosted.org/packages/d4/de/53e0bcf53d13e005bd8c92e7855142494f41171b34c2536b86187474184d/parso-0.8.5.tar.gz", hash = "sha256:034d7354a9a018bdce352f48b2a8a450f05e9d6ee85db84764e9b6bd96dafe5a", size = 401205, upload-time = "2025-08-23T15:15:28.028Z" }
3394
+ wheels = [
3395
+ { url = "https://files.pythonhosted.org/packages/16/32/f8e3c85d1d5250232a5d3477a2a28cc291968ff175caeadaf3cc19ce0e4a/parso-0.8.5-py2.py3-none-any.whl", hash = "sha256:646204b5ee239c396d040b90f9e272e9a8017c630092bf59980beb62fd033887", size = 106668, upload-time = "2025-08-23T15:15:25.663Z" },
3396
+ ]
3397
+
3398
+ [[package]]
3399
+ name = "peewee"
3400
+ version = "3.18.3"
3401
+ source = { registry = "https://pypi.org/simple" }
3402
+ sdist = { url = "https://files.pythonhosted.org/packages/6f/60/58e7a307a24044e0e982b99042fcd5a58d0cd928d9c01829574d7553ee8d/peewee-3.18.3.tar.gz", hash = "sha256:62c3d93315b1a909360c4b43c3a573b47557a1ec7a4583a71286df2a28d4b72e", size = 3026296, upload-time = "2025-11-03T16:43:46.678Z" }
3403
+
3404
+ [[package]]
3405
+ name = "pexpect"
3406
+ version = "4.9.0"
3407
+ source = { registry = "https://pypi.org/simple" }
3408
+ dependencies = [
3409
+ { name = "ptyprocess" },
3410
+ ]
3411
+ sdist = { url = "https://files.pythonhosted.org/packages/42/92/cc564bf6381ff43ce1f4d06852fc19a2f11d180f23dc32d9588bee2f149d/pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f", size = 166450, upload-time = "2023-11-25T09:07:26.339Z" }
3412
+ wheels = [
3413
+ { url = "https://files.pythonhosted.org/packages/9e/c3/059298687310d527a58bb01f3b1965787ee3b40dce76752eda8b44e9a2c5/pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523", size = 63772, upload-time = "2023-11-25T06:56:14.81Z" },
3414
+ ]
3415
+
3416
+ [[package]]
3417
+ name = "pillow"
3418
+ version = "12.0.0"
3419
+ source = { registry = "https://pypi.org/simple" }
3420
+ sdist = { url = "https://files.pythonhosted.org/packages/5a/b0/cace85a1b0c9775a9f8f5d5423c8261c858760e2466c79b2dd184638b056/pillow-12.0.0.tar.gz", hash = "sha256:87d4f8125c9988bfbed67af47dd7a953e2fc7b0cc1e7800ec6d2080d490bb353", size = 47008828, upload-time = "2025-10-15T18:24:14.008Z" }
3421
+ wheels = [
3422
+ { url = "https://files.pythonhosted.org/packages/2c/90/4fcce2c22caf044e660a198d740e7fbc14395619e3cb1abad12192c0826c/pillow-12.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:53561a4ddc36facb432fae7a9d8afbfaf94795414f5cdc5fc52f28c1dca90371", size = 5249377, upload-time = "2025-10-15T18:22:05.993Z" },
3423
+ { url = "https://files.pythonhosted.org/packages/fd/e0/ed960067543d080691d47d6938ebccbf3976a931c9567ab2fbfab983a5dd/pillow-12.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:71db6b4c1653045dacc1585c1b0d184004f0d7e694c7b34ac165ca70c0838082", size = 4650343, upload-time = "2025-10-15T18:22:07.718Z" },
3424
+ { url = "https://files.pythonhosted.org/packages/e7/a1/f81fdeddcb99c044bf7d6faa47e12850f13cee0849537a7d27eeab5534d4/pillow-12.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2fa5f0b6716fc88f11380b88b31fe591a06c6315e955c096c35715788b339e3f", size = 6232981, upload-time = "2025-10-15T18:22:09.287Z" },
3425
  { url = "https://files.pythonhosted.org/packages/88/e1/9098d3ce341a8750b55b0e00c03f1630d6178f38ac191c81c97a3b047b44/pillow-12.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:82240051c6ca513c616f7f9da06e871f61bfd7805f566275841af15015b8f98d", size = 8041399, upload-time = "2025-10-15T18:22:10.872Z" },
3426
  { url = "https://files.pythonhosted.org/packages/a7/62/a22e8d3b602ae8cc01446d0c57a54e982737f44b6f2e1e019a925143771d/pillow-12.0.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:55f818bd74fe2f11d4d7cbc65880a843c4075e0ac7226bc1a23261dbea531953", size = 6347740, upload-time = "2025-10-15T18:22:12.769Z" },
3427
  { url = "https://files.pythonhosted.org/packages/4f/87/424511bdcd02c8d7acf9f65caa09f291a519b16bd83c3fb3374b3d4ae951/pillow-12.0.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b87843e225e74576437fd5b6a4c2205d422754f84a06942cfaf1dc32243e45a8", size = 7040201, upload-time = "2025-10-15T18:22:14.813Z" },
 
4596
  { url = "https://files.pythonhosted.org/packages/d0/30/dc54f88dd4a2b5dc8a0279bdd7270e735851848b762aeb1c1184ed1f6b14/tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2", size = 78540, upload-time = "2024-11-24T20:12:19.698Z" },
4597
  ]
4598
 
4599
+ [[package]]
4600
+ name = "traceloop-sdk"
4601
+ version = "0.49.8"
4602
+ source = { registry = "https://pypi.org/simple" }
4603
+ dependencies = [
4604
+ { name = "aiohttp" },
4605
+ { name = "colorama" },
4606
+ { name = "cuid" },
4607
+ { name = "deprecated" },
4608
+ { name = "jinja2" },
4609
+ { name = "opentelemetry-api" },
4610
+ { name = "opentelemetry-exporter-otlp-proto-grpc" },
4611
+ { name = "opentelemetry-exporter-otlp-proto-http" },
4612
+ { name = "opentelemetry-instrumentation-agno" },
4613
+ { name = "opentelemetry-instrumentation-alephalpha" },
4614
+ { name = "opentelemetry-instrumentation-anthropic" },
4615
+ { name = "opentelemetry-instrumentation-bedrock" },
4616
+ { name = "opentelemetry-instrumentation-chromadb" },
4617
+ { name = "opentelemetry-instrumentation-cohere" },
4618
+ { name = "opentelemetry-instrumentation-crewai" },
4619
+ { name = "opentelemetry-instrumentation-google-generativeai" },
4620
+ { name = "opentelemetry-instrumentation-groq" },
4621
+ { name = "opentelemetry-instrumentation-haystack" },
4622
+ { name = "opentelemetry-instrumentation-lancedb" },
4623
+ { name = "opentelemetry-instrumentation-langchain" },
4624
+ { name = "opentelemetry-instrumentation-llamaindex" },
4625
+ { name = "opentelemetry-instrumentation-logging" },
4626
+ { name = "opentelemetry-instrumentation-marqo" },
4627
+ { name = "opentelemetry-instrumentation-mcp" },
4628
+ { name = "opentelemetry-instrumentation-milvus" },
4629
+ { name = "opentelemetry-instrumentation-mistralai" },
4630
+ { name = "opentelemetry-instrumentation-ollama" },
4631
+ { name = "opentelemetry-instrumentation-openai" },
4632
+ { name = "opentelemetry-instrumentation-openai-agents" },
4633
+ { name = "opentelemetry-instrumentation-pinecone" },
4634
+ { name = "opentelemetry-instrumentation-qdrant" },
4635
+ { name = "opentelemetry-instrumentation-redis" },
4636
+ { name = "opentelemetry-instrumentation-replicate" },
4637
+ { name = "opentelemetry-instrumentation-requests" },
4638
+ { name = "opentelemetry-instrumentation-sagemaker" },
4639
+ { name = "opentelemetry-instrumentation-sqlalchemy" },
4640
+ { name = "opentelemetry-instrumentation-threading" },
4641
+ { name = "opentelemetry-instrumentation-together" },
4642
+ { name = "opentelemetry-instrumentation-transformers" },
4643
+ { name = "opentelemetry-instrumentation-urllib3" },
4644
+ { name = "opentelemetry-instrumentation-vertexai" },
4645
+ { name = "opentelemetry-instrumentation-watsonx" },
4646
+ { name = "opentelemetry-instrumentation-weaviate" },
4647
+ { name = "opentelemetry-instrumentation-writer" },
4648
+ { name = "opentelemetry-sdk" },
4649
+ { name = "opentelemetry-semantic-conventions-ai" },
4650
+ { name = "pydantic" },
4651
+ { name = "tenacity" },
4652
+ ]
4653
+ sdist = { url = "https://files.pythonhosted.org/packages/45/28/941a57d5a5d746a1e37f401333597f5cf74414f1ea9cb76e961f1218cb53/traceloop_sdk-0.49.8.tar.gz", hash = "sha256:090c67e0aa19275e931fe51b16811982c6a7d8bba1aa6c8b2930ef9399bb9789", size = 45673, upload-time = "2025-12-11T20:34:25.175Z" }
4654
+ wheels = [
4655
+ { url = "https://files.pythonhosted.org/packages/17/a8/a2a28001a2969080040754b8edaa54412cfaddd7c868261d6e4afb1fdcc0/traceloop_sdk-0.49.8-py3-none-any.whl", hash = "sha256:6fed103e0b261a7be976fa178e18c0b5ffd4022127ad159b3cb5092b6113baa0", size = 61912, upload-time = "2025-12-11T20:34:23.869Z" },
4656
+ ]
4657
+
4658
  [[package]]
4659
  name = "traitlets"
4660
  version = "5.14.3"