swayam-the-coder committed on
Commit
3f9aaf6
·
verified ·
1 Parent(s): f4f9616

Upload app.py

Browse files
Files changed (1) hide show
  1. app.py +552 -0
app.py ADDED
@@ -0,0 +1,552 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import streamlit as st
3
+ from pathlib import Path
4
+ from tempfile import TemporaryDirectory
5
+ from langchain_core.messages import BaseMessage, HumanMessage
6
+ from typing import Annotated, List, Optional, Dict
7
+ from typing_extensions import TypedDict
8
+ from langchain_community.document_loaders import WebBaseLoader
9
+ from langchain_community.tools.tavily_search import TavilySearchResults
10
+ from langchain_core.tools import tool
11
+ from langchain.agents import AgentExecutor, create_openai_functions_agent
12
+ from langchain.output_parsers.openai_functions import JsonOutputFunctionsParser
13
+ from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
14
+ from langchain_openai import ChatOpenAI
15
+ from langgraph.graph import END, StateGraph, START
16
+ import functools
17
+ import operator
18
+ import logging
19
+ import time
20
+ from tenacity import retry, stop_after_attempt, wait_exponential, RetryError
21
+ from pydantic import ValidationError
22
+
23
+ # Set up logging
24
+ logging.basicConfig(level=logging.INFO)
25
+ logger = logging.getLogger(__name__)
26
+
27
+ # Initialize temporary directory
28
+ if 'working_directory' not in st.session_state:
29
+ _TEMP_DIRECTORY = TemporaryDirectory()
30
+ st.session_state.working_directory = Path(_TEMP_DIRECTORY.name)
31
+
32
+ WORKING_DIRECTORY = st.session_state.working_directory
33
+
34
+ # Streamlit UI
35
+ st.set_page_config(page_title="MARS: Multi-Agent Report Synthesizer", layout="wide")
36
+
37
+ # Custom CSS for styling
38
+ st.markdown("""
39
+ <style>
40
+ body {
41
+ background-color: #f5f5f5;
42
+ color: #333333;
43
+ font-family: 'Comic Sans MS', 'Comic Sans', cursive;
44
+ }
45
+ .report-container {
46
+ border-radius: 10px;
47
+ background-color: #ffcccb;
48
+ padding: 20px;
49
+ }
50
+ .sidebar .sidebar-content {
51
+ background-color: #333333;
52
+ color: #ffffff;
53
+ }
54
+ .stButton button {
55
+ background-color: #ff6347;
56
+ color: #ffffff;
57
+ border-radius: 5px;
58
+ font-size: 18px;
59
+ padding: 10px 20px;
60
+ font-weight: bold;
61
+ }
62
+ .stTextInput input {
63
+ border-radius: 5px;
64
+ border: 2px solid #ff6347;
65
+ font-size: 16px;
66
+ padding: 10px;
67
+ width: 100%;
68
+ }
69
+ .stTextInput label {
70
+ font-size: 18px;
71
+ font-weight: bold;
72
+ color: #333333;
73
+ }
74
+ .stSelectbox label, .stDownloadButton label {
75
+ font-size: 18px;
76
+ font-weight: bold;
77
+ color: #333333;
78
+ }
79
+ .stSelectbox div, .stDownloadButton div {
80
+ background-color: #ffcccb;
81
+ color: #333333;
82
+ border-radius: 5px;
83
+ padding: 10px;
84
+ font-size: 16px;
85
+ }
86
+ </style>
87
+ """, unsafe_allow_html=True)
88
+
89
+ st.title("πŸš€ MARS: Multi-agent Report Synthesizer πŸ€–")
90
+ st.sidebar.title("πŸ“‹ Instructions")
91
+ st.sidebar.write("""
92
+ 1. Enter your query in the input box.
93
+ 2. Marvin AI will assign tasks to different teams.
94
+ 3. You can see the progress and download the final report.
95
+ 4. Use the buttons to list and download output files.
96
+ """)
97
+
98
+ # Input fields for API keys
99
+ openai_api_key = st.sidebar.text_input("OpenAI API Key", type="password")
100
+ tavily_api_key = st.sidebar.text_input("Tavily API Key", type="password")
101
+
102
+ # Store the API keys in the session state
103
+ if openai_api_key:
104
+ os.environ["OPENAI_API_KEY"] = openai_api_key
105
+ if tavily_api_key:
106
+ os.environ["TAVILY_API_KEY"] = tavily_api_key
107
+
108
+ # Check if the API keys are set
109
+ if not os.getenv("OPENAI_API_KEY"):
110
+ st.error("OpenAI API Key is required.")
111
+ if not os.getenv("TAVILY_API_KEY"):
112
+ st.error("Tavily API Key is required.")
113
+
114
+ # Define tools
115
@retry(stop=stop_after_attempt(3), wait=wait_exponential(multiplier=1, min=4, max=10))
def tavily_search_with_retry(*args, **kwargs):
    """Construct a TavilySearchResults tool, retrying on transient failures.

    tenacity retries the construction up to 3 times with exponential backoff
    (4s–10s). Validation errors and any other exceptions are logged and then
    re-raised so the retry decorator can see them.
    """
    try:
        return TavilySearchResults(*args, **kwargs)
    except ValidationError as ve:
        logger.error(f"Validation error: {ve}")
        raise
    except Exception as e:
        logger.error(f"Error in Tavily search: {e}")
        raise
126
+
127
+ tavily_tool = tavily_search_with_retry(max_results=5)
128
+
129
@tool
def scrape_webpages(urls: List[str]) -> str:
    """Use requests and bs4 to scrape the provided web pages for detailed information."""
    # NOTE: the docstring above doubles as the tool description shown to the
    # LLM, so it is kept verbatim.
    try:
        documents = WebBaseLoader(urls).load()
        # Wrap each page body in blank lines, then separate pages with a gap.
        sections = [f'\n{doc.page_content}\n' for doc in documents]
        return "\n\n".join(sections)
    except Exception as e:
        logger.error(f"Error in scrape_webpages: {str(e)}")
        return f"Error occurred while scraping webpages: {str(e)}"
144
+
145
@tool
def create_outline(
    points: Annotated[List[str], "List of main points or sections."],
    file_name: Annotated[str, "File path to save the outline."],
) -> Annotated[str, "Path of the saved outline file."]:
    """Create and save an outline."""
    # (Docstring kept verbatim: @tool uses it as the tool description.)
    try:
        # enumerate(..., start=1) replaces the manual `i + 1` arithmetic;
        # utf-8 is forced so non-ASCII outline points don't crash on
        # platforms whose default encoding is not UTF-8 (e.g. Windows).
        with (WORKING_DIRECTORY / file_name).open("w", encoding="utf-8") as file:
            for number, point in enumerate(points, start=1):
                file.write(f"{number}. {point}\n")
        return f"Outline saved to {file_name}"
    except Exception as e:
        logger.error(f"Error in create_outline: {str(e)}")
        return f"Error occurred while creating outline: {str(e)}"
159
+
160
@tool
def read_document(
    file_name: Annotated[str, "File path to save the document."],
    start: Annotated[Optional[int], "The start line. Default is 0"] = None,
    end: Annotated[Optional[int], "The end line. Default is None"] = None,
) -> str:
    """Read the specified document."""
    # (Docstring kept verbatim: @tool uses it as the tool description.)
    try:
        with (WORKING_DIRECTORY / file_name).open("r") as file:
            lines = file.readlines()
        # BUG FIX: the original checked `if start is not None: start = 0`,
        # which reset any caller-supplied start offset to 0 and only left
        # `start` alone when it was None (slicing from None == 0 anyway).
        if start is None:
            start = 0
        # readlines() keeps each line's trailing "\n"; joining with "" avoids
        # the original's doubled line breaks from "\n".join(...).
        return "".join(lines[start:end])
    except Exception as e:
        logger.error(f"Error in read_document: {str(e)}")
        return f"Error occurred while reading document: {str(e)}"
176
+
177
@tool
def write_document(
    content: Annotated[str, "Text content to be written into the document."],
    file_name: Annotated[str, "File path to save the document."],
) -> Annotated[str, "Path of the saved document file."]:
    """Create and save a text document."""
    # (Docstring kept verbatim: @tool uses it as the tool description.)
    try:
        target = WORKING_DIRECTORY / file_name
        with target.open("w") as out:
            out.write(content)
        return f"Document saved to {file_name}"
    except Exception as e:
        logger.error(f"Error in write_document: {str(e)}")
        return f"Error occurred while writing document: {str(e)}"
190
+
191
@tool
def edit_document(
    file_name: Annotated[str, "Path of the document to be edited."],
    inserts: Annotated[
        Dict[int, str],
        "Dictionary where key is the line number (1-indexed) and value is the text to be inserted at that line.",
    ],
) -> Annotated[str, "Path of the edited document file."]:
    """Edit a document by inserting text at specific line numbers."""
    # (Docstring kept verbatim: @tool uses it as the tool description.)
    try:
        path = WORKING_DIRECTORY / file_name
        with path.open("r") as src:
            buffer = src.readlines()
        # Insertions are applied in ascending line order; each earlier insert
        # shifts the lines below it, matching the original behavior exactly.
        for line_number, text in sorted(inserts.items()):
            if not (1 <= line_number <= len(buffer) + 1):
                return f"Error: Line number {line_number} is out of range."
            buffer.insert(line_number - 1, text + "\n")
        with path.open("w") as dst:
            dst.writelines(buffer)
        return f"Document edited and saved to {file_name}"
    except Exception as e:
        logger.error(f"Error in edit_document: {str(e)}")
        return f"Error occurred while editing document: {str(e)}"
215
+
216
+ # Define the agents and their tools
217
+ llm = ChatOpenAI(model="gpt-3.5-turbo-0125")
218
+
219
def create_agent(llm: ChatOpenAI, tools: list, system_prompt: str) -> AgentExecutor:
    """Create a function-calling agent and add it to the graph.

    Appends shared collaboration instructions (including the {team_members}
    placeholder, filled in by the caller's prompt partials) to the agent's
    system prompt, then builds an OpenAI-functions agent over `tools`.

    FIX: the original return annotation was `-> str`, but the function
    returns an AgentExecutor; the annotation is corrected here.
    """
    system_prompt += """\nWork autonomously according to your specialty, using the tools available to you.
Do not ask for clarification.
Your other team members (and other teams) will collaborate with you with their own specialties.
You are chosen for a reason! You are one of the following team members: {team_members}."""
    prompt = ChatPromptTemplate.from_messages(
        [
            ("system", system_prompt),
            MessagesPlaceholder(variable_name="messages"),
            # agent_scratchpad holds the intermediate tool-call transcript
            # that create_openai_functions_agent expects.
            MessagesPlaceholder(variable_name="agent_scratchpad"),
        ]
    )
    agent = create_openai_functions_agent(llm, tools, prompt)
    executor = AgentExecutor(agent=agent, tools=tools)
    return executor
235
+
236
def agent_node(state, agent, name):
    """Invoke `agent` on `state` and wrap its output as a message from `name`.

    Errors never propagate: validation failures and any other exception are
    logged and returned as an error-text message so the graph keeps running.
    """
    try:
        logger.info(f"Starting {name} agent")
        result = agent.invoke(state)
        logger.info(f"{name} agent completed with result: {result}")
        reply = HumanMessage(content=result["output"], name=name)
    except ValidationError as ve:
        logger.error(f"Validation error in {name} agent: {ve}")
        reply = HumanMessage(content=f"Validation error in {name} agent: {ve}", name=name)
    except Exception as e:
        logger.error(f"Error in {name} agent: {e}")
        reply = HumanMessage(content=f"Error occurred in {name} agent: {e}", name=name)
    return {"messages": [reply]}
248
+
249
def create_team_supervisor(llm: ChatOpenAI, system_prompt, members):
    """An LLM-based router.

    Builds a runnable chain that forces `llm` (via OpenAI function calling)
    to pick the next worker from `members` or FINISH, and parses the result
    into a dict like {"next": "<choice>"}.

    FIX: the original annotated the return type as `str`; the chain actually
    yields the parsed function-call dict, so the wrong annotation is removed.
    """
    options = ["FINISH"] + members
    # JSON schema for the forced "route" function call; the enum inside
    # `anyOf` restricts the model's answer to exactly one of `options`.
    function_def = {
        "name": "route",
        "description": "Select the next role.",
        "parameters": {
            "title": "routeSchema",
            "type": "object",
            "properties": {
                "next": {
                    "title": "Next",
                    "anyOf": [
                        {"enum": options},
                    ],
                },
            },
            "required": ["next"],
        },
    }
    system_prompt += "\nEnsure that you direct the workflow to completion. If no progress is being made, or if the task seems complete, choose FINISH."
    prompt = ChatPromptTemplate.from_messages(
        [
            ("system", system_prompt),
            MessagesPlaceholder(variable_name="messages"),
            ("system", "Given the conversation above, who should act next? Or should we FINISH? Select one of: {options}"),
        ]
    ).partial(options=str(options), team_members=", ".join(members))
    # function_call="route" forces the model to answer via the schema above.
    return (
        prompt
        | llm.bind_functions(functions=[function_def], function_call="route")
        | JsonOutputFunctionsParser()
    )
282
+
283
+ # ResearchTeam graph state
284
class ResearchTeamState(TypedDict):
    """State carried through the research-team sub-graph."""
    # Conversation history; operator.add makes LangGraph append new
    # messages to the list instead of replacing it on each node update.
    messages: Annotated[List[BaseMessage], operator.add]
    # Names of the agents on this team (fills the {team_members}
    # placeholder in agent prompts).
    team_members: List[str]
    # Name of the next node the supervisor routes to (or "FINISH").
    next: str
288
+
289
+ llm = ChatOpenAI(model="gpt-3.5-turbo-0125")
290
+
291
+ search_agent = create_agent(
292
+ llm,
293
+ [tavily_tool],
294
+ "You are a research assistant who can search for up-to-date info using the tavily search engine.",
295
+ )
296
+ search_node = functools.partial(agent_node, agent=search_agent, name="Search")
297
+
298
+ research_agent = create_agent(
299
+ llm,
300
+ [scrape_webpages],
301
+ "You are a research assistant who can scrape specified urls for more detailed information using the scrape_webpages function.",
302
+ )
303
+ research_node = functools.partial(agent_node, agent=research_agent, name="WebScraper")
304
+
305
+ supervisor_agent = create_team_supervisor(
306
+ llm,
307
+ "You are a supervisor tasked with managing a conversation between the"
308
+ " following workers: Search, WebScraper. Given the following user request,"
309
+ " respond with the worker to act next. Each worker will perform a"
310
+ " task and respond with their results and status. When finished,"
311
+ " respond with FINISH.",
312
+ ["Search", "WebScraper"],
313
+ )
314
+
315
+ research_graph = StateGraph(ResearchTeamState)
316
+ research_graph.add_node("Search", search_node)
317
+ research_graph.add_node("WebScraper", research_node)
318
+ research_graph.add_node("supervisor", supervisor_agent)
319
+
320
+ # Define the control flow
321
+ research_graph.add_edge("Search", "supervisor")
322
+ research_graph.add_edge("WebScraper", "supervisor")
323
+ research_graph.add_conditional_edges(
324
+ "supervisor",
325
+ lambda x: x["next"],
326
+ {"Search": "Search", "WebScraper": "WebScraper", "FINISH": END},
327
+ )
328
+
329
+ research_graph.add_edge(START, "supervisor")
330
+ chain = research_graph.compile()
331
+
332
def enter_chain(message: str):
    """Wrap a raw user query as the initial message state for the research graph."""
    return {"messages": [HumanMessage(content=message)]}
337
+
338
+ research_chain = enter_chain | chain
339
+
340
+ # Document writing team graph state
341
class DocWritingState(TypedDict):
    """State carried through the document-authoring sub-graph."""
    # Conversation history; operator.add appends rather than replaces.
    messages: Annotated[List[BaseMessage], operator.add]
    # Comma-separated member names shown in supervisor/agent prompts.
    team_members: str
    # Next node the supervisor routes to (or "FINISH").
    next: str
    # Human-readable listing of files already written (set by prelude).
    current_files: str
346
+
347
def prelude(state):
    """Inject a listing of files already present in WORKING_DIRECTORY into `state`.

    Creates the directory if needed; any error while listing is treated as
    "no files yet", matching the original best-effort behavior.
    """
    if not WORKING_DIRECTORY.exists():
        WORKING_DIRECTORY.mkdir()
    try:
        entries = [
            p.relative_to(WORKING_DIRECTORY) for p in WORKING_DIRECTORY.rglob("*")
        ]
    except Exception:
        entries = []
    if not entries:
        return {**state, "current_files": "No files written."}
    listing = "\n".join([f" - {f}" for f in entries])
    return {
        **state,
        "current_files": "\nBelow are files your team has written to the directory:\n"
        + listing,
    }
364
+
365
+ doc_writer_agent = create_agent(
366
+ llm,
367
+ [write_document, edit_document, read_document],
368
+ "You are an expert writing a research document.\n"
369
+ "Below are files currently in your directory:\n{current_files}",
370
+ )
371
+ context_aware_doc_writer_agent = prelude | doc_writer_agent
372
+ doc_writing_node = functools.partial(
373
+ agent_node, agent=context_aware_doc_writer_agent, name="DocWriter"
374
+ )
375
+
376
+ note_taking_agent = create_agent(
377
+ llm,
378
+ [create_outline, read_document],
379
+ "You are an expert senior researcher tasked with writing a paper outline and"
380
+ " taking notes to craft a perfect paper.{current_files}",
381
+ )
382
+ context_aware_note_taking_agent = prelude | note_taking_agent
383
+ note_taking_node = functools.partial(
384
+ agent_node, agent=context_aware_note_taking_agent, name="NoteTaker"
385
+ )
386
+
387
chart_generating_agent = create_agent(
    llm,
    [read_document],
    "You are a data viz expert tasked with generating charts for a research project."
    "{current_files}",
)
context_aware_chart_generating_agent = prelude | chart_generating_agent
# BUG FIX: the ChartGenerator node previously wrapped
# context_aware_note_taking_agent, so chart tasks were silently handled by
# the note-taking agent and chart_generating_agent was never used. It must
# wrap the chart-generating agent built just above.
chart_generating_node = functools.partial(
    agent_node, agent=context_aware_chart_generating_agent, name="ChartGenerator"
)
397
+
398
+ doc_writing_supervisor = create_team_supervisor(
399
+ llm,
400
+ "You are a supervisor tasked with managing a conversation between the"
401
+ " following workers: {team_members}. Given the following user request,"
402
+ " respond with the worker to act next. Each worker will perform a"
403
+ " task and respond with their results and status. When finished,"
404
+ " respond with FINISH.",
405
+ ["DocWriter", "NoteTaker", "ChartGenerator"],
406
+ )
407
+
408
+ authoring_graph = StateGraph(DocWritingState)
409
+ authoring_graph.add_node("DocWriter", doc_writing_node)
410
+ authoring_graph.add_node("NoteTaker", note_taking_node)
411
+ authoring_graph.add_node("ChartGenerator", chart_generating_node)
412
+ authoring_graph.add_node("supervisor", doc_writing_supervisor)
413
+
414
+ authoring_graph.add_edge("DocWriter", "supervisor")
415
+ authoring_graph.add_edge("NoteTaker", "supervisor")
416
+ authoring_graph.add_edge("ChartGenerator", "supervisor")
417
+ authoring_graph.add_conditional_edges(
418
+ "supervisor",
419
+ lambda x: x["next"],
420
+ {
421
+ "DocWriter": "DocWriter",
422
+ "NoteTaker": "NoteTaker",
423
+ "ChartGenerator": "ChartGenerator",
424
+ "FINISH": END,
425
+ },
426
+ )
427
+
428
+ authoring_graph.add_edge(START, "supervisor")
429
+ chain = authoring_graph.compile()
430
+
431
def enter_chain(message: str, members: List[str]):
    """Build the initial authoring-graph state from a user query and team roster."""
    return {
        "messages": [HumanMessage(content=message)],
        "team_members": ", ".join(members),
    }
437
+
438
+ authoring_chain = (
439
+ functools.partial(enter_chain, members=authoring_graph.nodes)
440
+ | authoring_graph.compile()
441
+ )
442
+
443
+ llm = ChatOpenAI(model="gpt-3.5-turbo-0125")
444
+
445
+ supervisor_node = create_team_supervisor(
446
+ llm,
447
+ "You are a supervisor tasked with managing a conversation between the"
448
+ " following teams: {team_members}. Given the following user request,"
449
+ " respond with the worker to act next. Each worker will perform a"
450
+ " task and respond with their results and status. Make sure each team is used atleast once. When finished,"
451
+ " respond with FINISH.",
452
+ ["ResearchTeam", "PaperWritingTeam"],
453
+ )
454
+
455
class State(TypedDict):
    """Top-level super-graph state shared across both teams."""
    # Conversation history; operator.add appends new messages on update.
    messages: Annotated[List[BaseMessage], operator.add]
    # Next team the top-level supervisor routes to (or "FINISH").
    next: str
458
+
459
def get_last_message(state: State) -> str:
    """Return the text content of the most recent message in the state."""
    last = state["messages"][-1]
    return last.content
461
+
462
def join_graph(response: dict):
    """Collapse a sub-graph's response to just its final message."""
    last_message = response["messages"][-1]
    return {"messages": [last_message]}
464
+
465
+ super_graph = StateGraph(State)
466
+ super_graph.add_node("ResearchTeam", get_last_message | research_chain | join_graph)
467
+ super_graph.add_node("PaperWritingTeam", get_last_message | authoring_chain | join_graph)
468
+ super_graph.add_node("supervisor", supervisor_node)
469
+
470
+ super_graph.add_edge("ResearchTeam", "supervisor")
471
+ super_graph.add_edge("PaperWritingTeam", "supervisor")
472
+ super_graph.add_conditional_edges(
473
+ "supervisor",
474
+ lambda x: x["next"],
475
+ {
476
+ "PaperWritingTeam": "PaperWritingTeam",
477
+ "ResearchTeam": "ResearchTeam",
478
+ "FINISH": END,
479
+ },
480
+ )
481
+ super_graph.add_edge(START, "supervisor")
482
+ super_graph = super_graph.compile()
483
+
484
+ input_text = st.text_input("Enter your query:")
485
+
486
+ if input_text and os.getenv("OPENAI_API_KEY") and os.getenv("TAVILY_API_KEY"):
487
+ st.markdown("### πŸ› οΈ Task Progress")
488
+ start_time = time.time()
489
+ max_execution_time = 300 # 5 minutes
490
+
491
+ try:
492
+ for s in super_graph.stream(
493
+ {
494
+ "messages": [
495
+ HumanMessage(
496
+ content=input_text
497
+ )
498
+ ],
499
+ },
500
+ {"recursion_limit": 300}, # Increased recursion limit
501
+ ):
502
+ if "__end__" not in s:
503
+ st.write(s)
504
+ st.write("---")
505
+
506
+ # Check for timeout
507
+ if time.time() - start_time > max_execution_time:
508
+ st.warning("Execution time exceeded. Terminating the process.")
509
+ break
510
+ except RetryError as re:
511
+ st.error(f"Retry error occurred: {re}")
512
+ logger.error(f"Retry error in super_graph execution: {re}")
513
+ except ValidationError as ve:
514
+ st.error(f"Validation error occurred: {ve}")
515
+ logger.error(f"Validation error in super_graph execution: {ve}")
516
+ except Exception as e:
517
+ st.error(f"An error occurred: {str(e)}")
518
+ logger.error(f"Error in super_graph execution: {str(e)}")
519
+
520
+ if st.button("List Output Files"):
521
+ files = os.listdir(WORKING_DIRECTORY)
522
+ if files:
523
+ st.write("### πŸ“‚ Files in working directory:")
524
+ for file in files:
525
+ st.write(f"πŸ“„ {file}")
526
+ else:
527
+ st.write("No files found in the working directory.")
528
+
529
+ output_files = os.listdir(WORKING_DIRECTORY)
530
+ if output_files:
531
+ output_file = st.selectbox("Select an output file to download:", output_files)
532
+
533
+ if st.button("Download Output Document"):
534
+ file_path = WORKING_DIRECTORY / output_file
535
+ if file_path.exists():
536
+ with file_path.open("rb") as file:
537
+ st.download_button(
538
+ label="πŸ“₯ Download Output Document",
539
+ data=file,
540
+ file_name=output_file,
541
+ )
542
+ else:
543
+ st.write("Output document not found.")
544
+ else:
545
+ st.write("No output files available for download.")
546
+
547
+ # Cleanup
548
+ if st.button("Clear Working Directory"):
549
+ for file in WORKING_DIRECTORY.iterdir():
550
+ if file.is_file():
551
+ file.unlink()
552
+ st.success("Working directory cleared.")