Grinding committed on
Commit
626176b
·
verified ·
1 Parent(s): 6a01846

Update app/processing.py

Browse files
Files changed (1) hide show
  1. app/processing.py +8 -4
app/processing.py CHANGED
@@ -150,17 +150,21 @@ async def run_pipeline(task_id: str, file_path: Path, tasks_db: dict):
150
 
151
  summary_task = asyncio.to_thread(
152
  groq_client.chat.completions.create,
153
- model="llama3-70b-8192",
154
  messages=[{"role": "system", "content": SUMMARIZATION_SYSTEM_PROMPT}, {"role": "user", "content": full_transcript}],
155
- temperature=0.2,
 
 
156
  max_tokens=1024
157
  )
158
 
159
  action_item_task = asyncio.to_thread(
160
  groq_client.chat.completions.create,
161
- model="llama3-70b-8192",
162
  messages=[{"role": "system", "content": ACTION_ITEMS_SYSTEM_PROMPT}, {"role": "user", "content": full_transcript}],
163
- temperature=0.1,
 
 
164
  max_tokens=1024,
165
  response_format={"type": "json_object"}
166
  )
 
150
 
151
  summary_task = asyncio.to_thread(
152
  groq_client.chat.completions.create,
153
+ model="qwen/qwen3-32b",
154
  messages=[{"role": "system", "content": SUMMARIZATION_SYSTEM_PROMPT}, {"role": "user", "content": full_transcript}],
155
+ temperature=0.6,
156
+ reasoning_effort="default",
157
+ reasoning_format="hidden",
158
  max_tokens=1024
159
  )
160
 
161
  action_item_task = asyncio.to_thread(
162
  groq_client.chat.completions.create,
163
+ model="qwen/qwen3-32b",
164
  messages=[{"role": "system", "content": ACTION_ITEMS_SYSTEM_PROMPT}, {"role": "user", "content": full_transcript}],
165
+ temperature=0.6,
166
+ reasoning_effort="default",
167
+ reasoning_format="hidden",
168
  max_tokens=1024,
169
  response_format={"type": "json_object"}
170
  )