Ravindra committed on
Commit
34fc6b8
·
verified ·
1 Parent(s): 59ced9e

V3 files 1

Browse files
Files changed (4) hide show
  1. .gitattributes +0 -66
  2. Dockerfile-old-version.txt +43 -43
  3. agents/crew.py +29 -18
  4. api/config.py +4 -0
.gitattributes CHANGED
@@ -33,69 +33,3 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
33
  *.zip filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
36
- web/.next/dev/cache/turbopack/23c46498/00000011.sst filter=lfs diff=lfs merge=lfs -text
37
- web/.next/dev/cache/turbopack/23c46498/00000012.sst filter=lfs diff=lfs merge=lfs -text
38
- web/.next/dev/cache/turbopack/23c46498/00000013.sst filter=lfs diff=lfs merge=lfs -text
39
- web/.next/dev/cache/turbopack/23c46498/00000014.sst filter=lfs diff=lfs merge=lfs -text
40
- web/.next/dev/cache/turbopack/23c46498/00000015.sst filter=lfs diff=lfs merge=lfs -text
41
- web/.next/dev/cache/turbopack/23c46498/00000016.sst filter=lfs diff=lfs merge=lfs -text
42
- web/.next/dev/cache/turbopack/23c46498/00000017.sst filter=lfs diff=lfs merge=lfs -text
43
- web/.next/dev/cache/turbopack/23c46498/00000018.sst filter=lfs diff=lfs merge=lfs -text
44
- web/.next/dev/cache/turbopack/23c46498/00000020.sst filter=lfs diff=lfs merge=lfs -text
45
- web/.next/dev/cache/turbopack/23c46498/00000021.sst filter=lfs diff=lfs merge=lfs -text
46
- web/.next/dev/cache/turbopack/23c46498/00000022.sst filter=lfs diff=lfs merge=lfs -text
47
- web/.next/dev/cache/turbopack/23c46498/00000023.sst filter=lfs diff=lfs merge=lfs -text
48
- web/.next/dev/cache/turbopack/23c46498/00000024.sst filter=lfs diff=lfs merge=lfs -text
49
- web/.next/dev/cache/turbopack/23c46498/00000025.sst filter=lfs diff=lfs merge=lfs -text
50
- web/.next/dev/cache/turbopack/23c46498/00000026.sst filter=lfs diff=lfs merge=lfs -text
51
- web/.next/dev/cache/turbopack/23c46498/00000027.sst filter=lfs diff=lfs merge=lfs -text
52
- web/.next/dev/cache/turbopack/23c46498/00000029.meta filter=lfs diff=lfs merge=lfs -text
53
- web/.next/dev/cache/turbopack/23c46498/00000030.meta filter=lfs diff=lfs merge=lfs -text
54
- web/.next/dev/cache/turbopack/23c46498/00000031.meta filter=lfs diff=lfs merge=lfs -text
55
- web/.next/dev/cache/turbopack/23c46498/00000032.meta filter=lfs diff=lfs merge=lfs -text
56
- web/.next/dev/cache/turbopack/23c46498/00000033.sst filter=lfs diff=lfs merge=lfs -text
57
- web/.next/dev/cache/turbopack/23c46498/00000034.sst filter=lfs diff=lfs merge=lfs -text
58
- web/.next/dev/cache/turbopack/23c46498/00000043.sst filter=lfs diff=lfs merge=lfs -text
59
- web/.next/dev/cache/turbopack/23c46498/00000044.sst filter=lfs diff=lfs merge=lfs -text
60
- web/.next/dev/cache/turbopack/23c46498/00000108.sst filter=lfs diff=lfs merge=lfs -text
61
- web/.next/dev/cache/turbopack/23c46498/00000210.sst filter=lfs diff=lfs merge=lfs -text
62
- web/.next/dev/cache/turbopack/23c46498/00000287.sst filter=lfs diff=lfs merge=lfs -text
63
- web/.next/dev/cache/turbopack/23c46498/00000396.sst filter=lfs diff=lfs merge=lfs -text
64
- web/.next/dev/cache/turbopack/23c46498/00000408.sst filter=lfs diff=lfs merge=lfs -text
65
- web/.next/dev/cache/turbopack/23c46498/00000426.sst filter=lfs diff=lfs merge=lfs -text
66
- web/.next/dev/cache/turbopack/23c46498/00000492.sst filter=lfs diff=lfs merge=lfs -text
67
- web/.next/dev/cache/turbopack/23c46498/00000498.sst filter=lfs diff=lfs merge=lfs -text
68
- web/.next/dev/cache/turbopack/23c46498/00000504.sst filter=lfs diff=lfs merge=lfs -text
69
- web/.next/dev/cache/turbopack/23c46498/00000510.sst filter=lfs diff=lfs merge=lfs -text
70
- web/.next/dev/cache/turbopack/23c46498/00000516.sst filter=lfs diff=lfs merge=lfs -text
71
- web/.next/dev/cache/turbopack/23c46498/00000528.sst filter=lfs diff=lfs merge=lfs -text
72
- web/.next/dev/cache/turbopack/23c46498/00000540.sst filter=lfs diff=lfs merge=lfs -text
73
- web/.next/dev/cache/turbopack/23c46498/00000593.sst filter=lfs diff=lfs merge=lfs -text
74
- web/.next/dev/cache/turbopack/23c46498/00000617.sst filter=lfs diff=lfs merge=lfs -text
75
- web/.next/dev/cache/turbopack/23c46498/00000803.sst filter=lfs diff=lfs merge=lfs -text
76
- web/.next/dev/cache/turbopack/23c46498/00000804.sst filter=lfs diff=lfs merge=lfs -text
77
- web/.next/dev/cache/turbopack/23c46498/00000819.sst filter=lfs diff=lfs merge=lfs -text
78
- web/.next/dev/cache/turbopack/23c46498/00000820.sst filter=lfs diff=lfs merge=lfs -text
79
- web/.next/dev/cache/turbopack/23c46498/00000824.meta filter=lfs diff=lfs merge=lfs -text
80
- web/.next/dev/cache/turbopack/23c46498/00000825.sst filter=lfs diff=lfs merge=lfs -text
81
- web/.next/dev/cache/turbopack/23c46498/00000826.sst filter=lfs diff=lfs merge=lfs -text
82
- web/.next/dev/cache/turbopack/23c46498/00000835.sst filter=lfs diff=lfs merge=lfs -text
83
- web/.next/dev/cache/turbopack/23c46498/00000836.sst filter=lfs diff=lfs merge=lfs -text
84
- web/.next/dev/cache/turbopack/23c46498/00000837.sst filter=lfs diff=lfs merge=lfs -text
85
- web/.next/dev/cache/turbopack/23c46498/00000838.sst filter=lfs diff=lfs merge=lfs -text
86
- web/.next/dev/cache/turbopack/23c46498/00000839.sst filter=lfs diff=lfs merge=lfs -text
87
- web/.next/dev/cache/turbopack/23c46498/00000841.sst filter=lfs diff=lfs merge=lfs -text
88
- web/.next/dev/cache/turbopack/23c46498/00000842.sst filter=lfs diff=lfs merge=lfs -text
89
- web/.next/dev/cache/turbopack/23c46498/00000844.meta filter=lfs diff=lfs merge=lfs -text
90
- web/.next/dev/cache/turbopack/23c46498/00000845.meta filter=lfs diff=lfs merge=lfs -text
91
- web/.next/dev/cache/turbopack/23c46498/00000846.meta filter=lfs diff=lfs merge=lfs -text
92
- web/.next/dev/cache/turbopack/23c46498/00000847.meta filter=lfs diff=lfs merge=lfs -text
93
- web/.next/dev/cache/turbopack/23c46498/00000848.sst filter=lfs diff=lfs merge=lfs -text
94
- web/.next/dev/cache/turbopack/23c46498/00000849.sst filter=lfs diff=lfs merge=lfs -text
95
- web/.next/dev/cache/turbopack/23c46498/00000854.sst filter=lfs diff=lfs merge=lfs -text
96
- web/.next/dev/cache/turbopack/23c46498/00000878.sst filter=lfs diff=lfs merge=lfs -text
97
- web/.next/dev/cache/turbopack/23c46498/00000897.sst filter=lfs diff=lfs merge=lfs -text
98
- web/.next/dev/cache/turbopack/23c46498/00000968.sst filter=lfs diff=lfs merge=lfs -text
99
- web/.next/dev/cache/turbopack/23c46498/00000969.sst filter=lfs diff=lfs merge=lfs -text
100
- web/.next/dev/cache/turbopack/23c46498/00001004.sst filter=lfs diff=lfs merge=lfs -text
101
- web/.next/dev/cache/turbopack/23c46498/00001058.sst filter=lfs diff=lfs merge=lfs -text
 
33
  *.zip filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
Dockerfile-old-version.txt CHANGED
@@ -1,44 +1,44 @@
1
- # --- Stage 1: Frontend Builder ---
2
- FROM node:20-alpine AS frontend-builder
3
- # Set WORKDIR specifically to the web folder to resolve aliases correctly
4
- WORKDIR /app/web
5
-
6
- # Copy package files first to leverage Docker cache
7
- COPY web/package.json web/package-lock.json ./
8
- RUN npm ci
9
-
10
- # Copy the rest of the web source code
11
- COPY web/ ./
12
-
13
- # Set environment variables for the build
14
- ENV NEXT_PUBLIC_API_URL=""
15
- # Disable Turbopack - use webpack which correctly resolves @/ path aliases
16
- ENV TURBOPACK=0
17
- # Force a clean production build to generate the .next folder
18
- RUN npm run build
19
-
20
- # --- Stage 2: Final Production Image ---
21
- FROM python:3.11-slim
22
- WORKDIR /app
23
-
24
- # Install Redis for your Celery background tasks
25
- RUN apt-get update && apt-get install -y redis-server git && rm -rf /var/lib/apt/lists/*
26
-
27
- # Install Python dependencies for CrewAI and FastAPI
28
- COPY requirements.txt .
29
- RUN pip install --no-cache-dir -r requirements.txt
30
-
31
- # Copy all backend code (api/, agents/, tasks.py, start.sh)
32
- COPY . .
33
-
34
- # Copy the static export output that FastAPI serves via StaticFiles
35
- COPY --from=frontend-builder /app/web/out /app/web/out
36
-
37
- # Ensure the startup script is executable
38
- RUN chmod +x start.sh
39
-
40
- # Hugging Face Spaces mandatory port
41
- ENV PORT=7860
42
- EXPOSE 7860
43
-
44
  CMD ["./start.sh"]
 
1
+ # --- Stage 1: Frontend Builder ---
2
+ FROM node:20-alpine AS frontend-builder
3
+ # Set WORKDIR specifically to the web folder to resolve aliases correctly
4
+ WORKDIR /app/web
5
+
6
+ # Copy package files first to leverage Docker cache
7
+ COPY web/package.json web/package-lock.json ./
8
+ RUN npm ci
9
+
10
+ # Copy the rest of the web source code
11
+ COPY web/ ./
12
+
13
+ # Set environment variables for the build
14
+ ENV NEXT_PUBLIC_API_URL=""
15
+ # Disable Turbopack - use webpack which correctly resolves @/ path aliases
16
+ ENV TURBOPACK=0
17
+ # Force a clean production build to generate the .next folder
18
+ RUN npm run build
19
+
20
+ # --- Stage 2: Final Production Image ---
21
+ FROM python:3.11-slim
22
+ WORKDIR /app
23
+
24
+ # Install Redis for your Celery background tasks
25
+ RUN apt-get update && apt-get install -y redis-server git && rm -rf /var/lib/apt/lists/*
26
+
27
+ # Install Python dependencies for CrewAI and FastAPI
28
+ COPY requirements.txt .
29
+ RUN pip install --no-cache-dir -r requirements.txt
30
+
31
+ # Copy all backend code (api/, agents/, tasks.py, start.sh)
32
+ COPY . .
33
+
34
+ # Copy the static export output that FastAPI serves via StaticFiles
35
+ COPY --from=frontend-builder /app/web/out /app/web/out
36
+
37
+ # Ensure the startup script is executable
38
+ RUN chmod +x start.sh
39
+
40
+ # Hugging Face Spaces mandatory port
41
+ ENV PORT=7860
42
+ EXPOSE 7860
43
+
44
  CMD ["./start.sh"]
agents/crew.py CHANGED
@@ -85,12 +85,16 @@ def _build_llm(provider: str):
85
  groq_model = f"groq/{groq_model}"
86
  return LLM(
87
  model=groq_model,
88
- api_key=settings.GROQ_API_KEY
 
 
89
  )
90
 
91
  return LLM(
92
  model=settings.OPENAI_MODEL,
93
- api_key=settings.OPENAI_API_KEY
 
 
94
  )
95
 
96
 
@@ -147,27 +151,34 @@ def get_crew(
147
  domain = domain.lower()
148
  persona = PERSONAS.get(domain, PERSONAS["fintech"]) # Default to fintech if not found
149
 
150
- # Initialize LLM
151
- llm = _build_llm(provider=provider)
152
-
153
- # 1. Create Agents
154
- researcher = Agent(
 
 
 
 
 
 
 
155
  role=persona["researcher"]["role"],
156
  goal=persona["researcher"]["goal"].format(topic=topic),
157
- backstory=persona["researcher"]["backstory"],
158
- allow_delegation=False,
159
- verbose=True,
160
- llm=llm
161
- )
162
 
163
  writer = Agent(
164
  role=persona["writer"]["role"],
165
  goal=persona["writer"]["goal"].format(topic=topic),
166
- backstory=persona["writer"]["backstory"],
167
- allow_delegation=False,
168
- verbose=True,
169
- llm=llm
170
- )
171
 
172
  live_context = _fetch_live_context(topic=topic, domain=domain) if grounded else ""
173
  grounding_block = ""
@@ -209,7 +220,7 @@ def get_crew(
209
  f"Using the research provided, write an engaging, high-growth newsletter about "
210
  f"'{topic}'. Use a viral style with catchy subheadings like {sections}. Conclude "
211
  f"with an 'Actionable Insights' section. The tone should be {tone}. Include a "
212
- f"'Sources' section with URL and date for each reference."
213
  ),
214
  expected_output="A viral-style markdown newsletter ready for publication.",
215
  agent=writer
 
85
  groq_model = f"groq/{groq_model}"
86
  return LLM(
87
  model=groq_model,
88
+ api_key=settings.GROQ_API_KEY,
89
+ max_tokens=settings.GROQ_MAX_OUTPUT_TOKENS,
90
+ temperature=settings.GROQ_TEMPERATURE,
91
  )
92
 
93
  return LLM(
94
  model=settings.OPENAI_MODEL,
95
+ api_key=settings.OPENAI_API_KEY,
96
+ max_tokens=settings.OPENAI_MAX_OUTPUT_TOKENS,
97
+ temperature=settings.OPENAI_TEMPERATURE,
98
  )
99
 
100
 
 
151
  domain = domain.lower()
152
  persona = PERSONAS.get(domain, PERSONAS["fintech"]) # Default to fintech if not found
153
 
154
+ # Initialize LLM
155
+ llm = _build_llm(provider=provider)
156
+ provider = (provider or "").strip().lower()
157
+ is_groq_guest = provider == "groq"
158
+ length_instruction = (
159
+ "Keep the final newsletter concise: 350-550 words. Avoid filler and repetition."
160
+ if is_groq_guest
161
+ else "Target a thorough but focused final newsletter of 700-1100 words."
162
+ )
163
+
164
+ # 1. Create Agents
165
+ researcher = Agent(
166
  role=persona["researcher"]["role"],
167
  goal=persona["researcher"]["goal"].format(topic=topic),
168
+ backstory=persona["researcher"]["backstory"],
169
+ allow_delegation=False,
170
+ verbose=False,
171
+ llm=llm
172
+ )
173
 
174
  writer = Agent(
175
  role=persona["writer"]["role"],
176
  goal=persona["writer"]["goal"].format(topic=topic),
177
+ backstory=persona["writer"]["backstory"],
178
+ allow_delegation=False,
179
+ verbose=False,
180
+ llm=llm
181
+ )
182
 
183
  live_context = _fetch_live_context(topic=topic, domain=domain) if grounded else ""
184
  grounding_block = ""
 
220
  f"Using the research provided, write an engaging, high-growth newsletter about "
221
  f"'{topic}'. Use a viral style with catchy subheadings like {sections}. Conclude "
222
  f"with an 'Actionable Insights' section. The tone should be {tone}. Include a "
223
+ f"'Sources' section with URL and date for each reference. {length_instruction}"
224
  ),
225
  expected_output="A viral-style markdown newsletter ready for publication.",
226
  agent=writer
api/config.py CHANGED
@@ -10,6 +10,10 @@ class Settings(BaseSettings):
10
  MOCK_AGENTS: bool = False
11
  OPENAI_MODEL: str = "gpt-4o"
12
  GROQ_MODEL: str = "llama-3.1-8b-instant"
 
 
 
 
13
  RATE_LIMIT_SALT: str = "omnicontent-dev-salt"
14
  FINGERPRINT_DAILY_LIMIT: int = 2
15
  IP_FALLBACK_DAILY_LIMIT: int = 1
 
10
  MOCK_AGENTS: bool = False
11
  OPENAI_MODEL: str = "gpt-4o"
12
  GROQ_MODEL: str = "llama-3.1-8b-instant"
13
+ OPENAI_MAX_OUTPUT_TOKENS: int = 1800
14
+ GROQ_MAX_OUTPUT_TOKENS: int = 700
15
+ OPENAI_TEMPERATURE: float = 0.7
16
+ GROQ_TEMPERATURE: float = 0.4
17
  RATE_LIMIT_SALT: str = "omnicontent-dev-salt"
18
  FINGERPRINT_DAILY_LIMIT: int = 2
19
  IP_FALLBACK_DAILY_LIMIT: int = 1