leavoigt committed on
Commit
d0ad8eb
·
verified ·
1 Parent(s): 3d237b4

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +82 -32
app.py CHANGED
@@ -6,55 +6,105 @@ from utils.generator import generate
6
  # ---------------------------------------------------------------------
7
 
8
 
9
- # Only run once to download full requirements
10
  import subprocess
11
  from pathlib import Path
 
 
 
 
 
12
 
13
def dump_installed_packages():
    """Dump this environment's installed packages to requirements_full.txt.

    Runs ``pip freeze`` and writes its output to a file in the working
    directory. Failures are reported on stdout rather than raised, so this
    best-effort helper never blocks app startup.
    """
    import sys  # local import: only needed for sys.executable

    output_file = Path("requirements_full.txt")
    try:
        with output_file.open("w") as f:
            # Invoke pip via the running interpreter: a bare "pip" on PATH
            # may belong to a different Python environment.
            subprocess.run([sys.executable, "-m", "pip", "freeze"], stdout=f, check=True)
        print(f"[INFO] requirements_full.txt created at: {output_file.resolve()}")
    except Exception as e:
        # Best-effort: log and continue rather than crash the app.
        print(f"[ERROR] Could not write requirements: {e}")
21
 
 
22
  dump_installed_packages()
23
 
 
 
 
24
 
 
 
 
25
 
26
# Gradio interface exposing generate() as both a web UI and an MCP tool
# for use in RAG pipelines (context normally supplied by the retriever).
ui = gr.Interface(
    fn=generate,
    inputs=[
        gr.Textbox(
            label="Query",
            lines=2,
            placeholder="Enter query here",
            info="The query to search for in the vector database"
        ),
        gr.Textbox(
            label="Context",
            lines=8,
            placeholder="Paste relevant context here",
            # FIX: user-facing typo "should except anything" -> "should accept anything"
            info="Provide the context/documents to use for answering. The API expects a list of dictionaries, but the UI should accept anything"
        ),
    ],
    outputs=gr.Textbox(
        label="Generated Answer",
        lines=6,
        show_copy_button=True
    ),
    title="ChatFed Generation Module",
    description="Ask questions based on provided context. Intended for use in RAG pipelines as an MCP server with other ChatFed modules (i.e. context supplied by semantic retriever service).",
    api_name="generate"
)

# Launch with MCP server enabled
if __name__ == "__main__":
    ui.launch(
        server_name="0.0.0.0",  # bind on all interfaces (container-friendly)
        server_port=7860,
        mcp_server=True,        # expose generate() as an MCP tool
        show_error=True
    )
60
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
6
  # ---------------------------------------------------------------------
7
 
8
 
 
9
  import subprocess
10
  from pathlib import Path
11
+ import gradio as gr
12
+ from utils.generator import generate
13
+
14
# File to store frozen packages
FREEZE_FILE = "requirements_full.txt"

def dump_installed_packages():
    """Write ``pip freeze`` output to FREEZE_FILE.

    Returns:
        str: FREEZE_FILE on success, or a human-readable error message on
        failure. Callers must check the return value — no exception is
        raised.
    """
    import sys  # local import: only needed for sys.executable

    try:
        with open(FREEZE_FILE, "w") as f:
            # Invoke pip via the running interpreter so we freeze *this*
            # environment, not whichever "pip" happens to be first on PATH.
            subprocess.run([sys.executable, "-m", "pip", "freeze"], stdout=f, check=True)
        return FREEZE_FILE
    except Exception as e:
        return f"Error generating requirements list: {e}"
25
 
26
# Generate once at startup so the freeze file exists before the UI loads
dump_installed_packages()

# ---------------------------------------------------------------------
# Unified Gradio App (Blocks with Interface + File)
# ---------------------------------------------------------------------

# Build the Interface *outside* the Blocks context, then render() it inside.
# Constructing an Interface while a Blocks context is active can attach its
# components to the outer Blocks and render them twice; gradio's documented
# pattern for mixing Interface into Blocks is construct-then-render.
ui = gr.Interface(
    fn=generate,
    inputs=[
        gr.Textbox(
            label="Query",
            lines=2,
            placeholder="Enter query here",
            info="The query to search for in the vector database"
        ),
        gr.Textbox(
            label="Context",
            lines=8,
            placeholder="Paste relevant context here",
            info="Provide the context/documents to use for answering."
        ),
    ],
    outputs=gr.Textbox(
        label="Generated Answer",
        lines=6,
        show_copy_button=True
    ),
    api_name="generate"
)

with gr.Blocks() as demo:
    gr.Markdown("📦 Download the list of installed Python packages:")
    gr.File(value=FREEZE_FILE, label="requirements_full.txt")

    gr.Markdown("## 🔍 ChatFed Generation Module")
    ui.render()

# Launch with MCP support
if __name__ == "__main__":
    demo.launch(
        server_name="0.0.0.0",  # bind on all interfaces (container-friendly)
        server_port=7860,
        mcp_server=True,        # expose generate() as an MCP tool
        show_error=True
    )
73
 
74
+
75
+
76
+ # ui = gr.Interface(
77
+ # fn=generate,
78
+ # inputs=[
79
+ # gr.Textbox(
80
+ # label="Query",
81
+ # lines=2,
82
+ # placeholder="Enter query here",
83
+ # info="The query to search for in the vector database"
84
+ # ),
85
+ # gr.Textbox(
86
+ # label="Context",
87
+ # lines=8,
88
+ # placeholder="Paste relevant context here",
89
+ # info="Provide the context/documents to use for answering. The API expects a list of dictionaries, but the UI should except anything"
90
+ # ),
91
+ # ],
92
+ # outputs=gr.Textbox(
93
+ # label="Generated Answer",
94
+ # lines=6,
95
+ # show_copy_button=True
96
+ # ),
97
+ # title="ChatFed Generation Module",
98
+ # description="Ask questions based on provided context. Intended for use in RAG pipelines as an MCP server with other ChatFed modules (i.e. context supplied by semantic retriever service).",
99
+ # api_name="generate"
100
+ # )
101
+
102
+ # # Launch with MCP server enabled
103
+ # if __name__ == "__main__":
104
+ # ui.launch(
105
+ # server_name="0.0.0.0",
106
+ # server_port=7860,
107
+ # mcp_server=True,
108
+ # show_error=True
109
+ # )
110
+