mishrabp committed on
Commit
6a5a0e6
·
verified ·
1 Parent(s): a3d6a08

Upload folder using huggingface_hub

Browse files
common/utility/autogen_model_factory.py CHANGED
@@ -51,23 +51,45 @@ class AutoGenModelFactory:
51
  # GOOGLE (GEMINI) via OpenAI Compat
52
  # ----------------------------------------------------------------------
53
  elif provider.lower() == "google" or provider.lower() == "gemini":
 
 
 
 
 
 
 
 
 
54
  return OpenAIChatCompletionClient(
55
  model=model_name,
56
  base_url="https://generativelanguage.googleapis.com/v1beta/openai/",
57
  api_key=os.environ["GOOGLE_API_KEY"],
58
- model_info=model_info, # Pass full model_info for capabilities
59
  temperature=temperature,
 
 
60
  )
61
 
62
  # ----------------------------------------------------------------------
63
  # GROQ
64
  # ----------------------------------------------------------------------
65
  elif provider.lower() == "groq":
 
 
 
 
 
 
 
 
 
66
  return OpenAIChatCompletionClient(
67
  model=model_name,
68
  base_url="https://api.groq.com/openai/v1",
69
  api_key=os.environ["GROQ_API_KEY"],
 
70
  temperature=temperature,
 
71
  )
72
 
73
  # ----------------------------------------------------------------------
 
51
  # GOOGLE (GEMINI) via OpenAI Compat
52
  # ----------------------------------------------------------------------
53
  elif provider.lower() == "google" or provider.lower() == "gemini":
54
+ if model_info is None:
55
+ model_info = {
56
+ "family": "gpt",
57
+ "vision": False,
58
+ "function_calling": True,
59
+ "json_output": True,
60
+ "structured_output": False
61
+ }
62
+
63
  return OpenAIChatCompletionClient(
64
  model=model_name,
65
  base_url="https://generativelanguage.googleapis.com/v1beta/openai/",
66
  api_key=os.environ["GOOGLE_API_KEY"],
67
+ model_info=model_info,
68
  temperature=temperature,
69
+ max_tokens=2048,
70
+ extra_headers={"x-goog-api-key": os.environ["GOOGLE_API_KEY"]}
71
  )
72
 
73
  # ----------------------------------------------------------------------
74
  # GROQ
75
  # ----------------------------------------------------------------------
76
  elif provider.lower() == "groq":
77
+ if model_info is None:
78
+ model_info = {
79
+ "family": "llama", # Use llama family for Groq
80
+ "vision": False,
81
+ "function_calling": True,
82
+ "json_output": True,
83
+ "structured_output": False
84
+ }
85
+
86
  return OpenAIChatCompletionClient(
87
  model=model_name,
88
  base_url="https://api.groq.com/openai/v1",
89
  api_key=os.environ["GROQ_API_KEY"],
90
+ model_info=model_info,
91
  temperature=temperature,
92
+ max_tokens=2048
93
  )
94
 
95
  # ----------------------------------------------------------------------
run.py CHANGED
@@ -18,7 +18,7 @@ import subprocess
18
  import argparse
19
  from pathlib import Path
20
  from typing import Dict, Optional
21
- from agents import Runner, SQLiteSession
22
  # from agents import set_trace_processors
23
  # from langsmith.wrappers import OpenAIAgentsTracingProcessor
24
 
@@ -56,6 +56,7 @@ APP_REGISTRY: Dict[str, Dict[str, str]] = {
56
  "trip-planner": {
57
  "path": "src/trip-planner",
58
  "entry": "main.py",
 
59
  "description": "Trip Planner - Detailed trip itinerary planning"
60
  },
61
  "chatbot_v1": {
@@ -85,8 +86,9 @@ APP_REGISTRY: Dict[str, Dict[str, str]] = {
85
  },
86
  "market-analyst": {
87
  "path": "src/market-analyst",
88
- "entry": "app.py",
89
- "description": "Market Analyst - Multi-agent market analysis tool"
 
90
  },
91
  "image": {
92
  "path": "src/image-generator",
@@ -177,15 +179,52 @@ def launch_app(app_name: str, port: Optional[int] = None):
177
  print(f"πŸ“‚ Location: {config['path']}")
178
  print(f"🌐 Entry Point: {app_file}")
179
 
180
- # Build streamlit command
181
- cmd = ["streamlit", "run", app_file]
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
182
 
183
  # Add port if specified
184
  if port:
185
- cmd.extend(["--server.port", str(port)])
 
 
 
186
  print(f"πŸ”Œ Port: {port}")
187
  else:
188
- print(f"πŸ”Œ Port: 8501 (default)")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
189
 
190
  print("\n" + "=" * 70)
191
  print("\n🎯 Starting application...\n")
@@ -202,8 +241,9 @@ def launch_app(app_name: str, port: Optional[int] = None):
202
  except KeyboardInterrupt:
203
  print("\n\nπŸ‘‹ Application stopped by user")
204
  except FileNotFoundError:
205
- print("\n❌ Error: Streamlit not found. Please install it:")
206
- print(" pip install streamlit")
 
207
  sys.exit(1)
208
  except Exception as e:
209
  print(f"\n❌ Error launching app: {e}")
 
18
  import argparse
19
  from pathlib import Path
20
  from typing import Dict, Optional
21
+ # from agents import Runner, SQLiteSession
22
  # from agents import set_trace_processors
23
  # from langsmith.wrappers import OpenAIAgentsTracingProcessor
24
 
 
56
  "trip-planner": {
57
  "path": "src/trip-planner",
58
  "entry": "main.py",
59
+ "type": "fastapi",
60
  "description": "Trip Planner - Detailed trip itinerary planning"
61
  },
62
  "chatbot_v1": {
 
86
  },
87
  "market-analyst": {
88
  "path": "src/market-analyst",
89
+ "entry": "backend/main.py",
90
+ "type": "fastapi",
91
+ "description": "Market Analyst - Decoupled Multi-agent market analysis (Vue.js + FastAPI)"
92
  },
93
  "image": {
94
  "path": "src/image-generator",
 
179
  print(f"πŸ“‚ Location: {config['path']}")
180
  print(f"🌐 Entry Point: {app_file}")
181
 
182
+ app_type = config.get("type", "streamlit")
183
+
184
+ # Decoupled App Logic: Build frontend if needed
185
+ if app_name == "market-analyst":
186
+ frontend_dir = project_root / "src/market-analyst/frontend"
187
+ dist_dir = frontend_dir / "dist"
188
+ if not dist_dir.exists():
189
+ print("\nπŸ› οΈ Frontend build missing. Building now...")
190
+ subprocess.run(["npm", "run", "build"], cwd=frontend_dir, shell=True)
191
+ print("βœ… Frontend built.\n")
192
+
193
+ python_exe = sys.executable
194
+
195
+ # Build command based on app type
196
+ if app_type == "fastapi":
197
+ # Extract module name from entry point (e.g. backend/main.py -> backend.main)
198
+ module_path = app_file.replace(".py", "").replace("/", ".").replace("\\", ".")
199
+ cmd = [python_exe, "-m", "uvicorn", f"{module_path}:app", "--host", "0.0.0.0"]
200
+ default_port = 8000
201
+ else:
202
+ cmd = [python_exe, "-m", "streamlit", "run", app_file]
203
+ default_port = 8501
204
 
205
  # Add port if specified
206
  if port:
207
+ if app_type == "fastapi":
208
+ cmd.extend(["--port", str(port)])
209
+ else:
210
+ cmd.extend(["--server.port", str(port)])
211
  print(f"πŸ”Œ Port: {port}")
212
  else:
213
+ print(f"πŸ”Œ Port: {default_port} (default)")
214
+
215
+ # Determine the actual port to use
216
+ actual_port = port if port else default_port
217
+
218
+ # Kill any process using the target port
219
+ try:
220
+ import platform
221
+ if platform.system() != "Windows":
222
+ # Use fuser on Linux/Mac to kill processes on the port
223
+ kill_cmd = ["fuser", "-k", f"{actual_port}/tcp"]
224
+ subprocess.run(kill_cmd, stderr=subprocess.DEVNULL, stdout=subprocess.DEVNULL)
225
+ print(f"🧹 Cleaned up port {actual_port}")
226
+ except Exception:
227
+ pass # Silently continue if cleanup fails
228
 
229
  print("\n" + "=" * 70)
230
  print("\n🎯 Starting application...\n")
 
241
  except KeyboardInterrupt:
242
  print("\n\nπŸ‘‹ Application stopped by user")
243
  except FileNotFoundError:
244
+ binary = "uvicorn" if app_type == "fastapi" else "streamlit"
245
+ print(f"\n❌ Error: {binary} not found in the current environment.")
246
+ print(f" Please install it: pip install {binary}")
247
  sys.exit(1)
248
  except Exception as e:
249
  print(f"\n❌ Error launching app: {e}")
src/interview-assistant/data/interview_rag_db/72b4b7ac-2c9d-43e6-84dd-0f3aed61b719/length.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:80cab3d510c7cb7c0295be973a9ef768fff8a3e278b39013ff3864b33671b88c
3
  size 400
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:87fd91c23e77b14904e335a87cde5d1ef5985de4c61786831826ef13ce151c0c
3
  size 400
src/interview-assistant/data/interview_rag_db/chroma.sqlite3 CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:5921d98369b133d6832b2806d7934bfc01ea77768c9fca955830e7d3a3c721fb
3
  size 495616
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:b0a77df8f4a2c4fd4a450df8c3c1587145f3a2def05fe72cdbc91c7667ce6b77
3
  size 495616