Pontonkid committed on
Commit
2bd30ff
·
verified ·
1 Parent(s): 4dd45c9

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +262 -69
app.py CHANGED
@@ -1,74 +1,267 @@
 
 
 
 
 
 
 
 
 
1
  import gradio as gr
2
- from llama_index import (
3
- SimpleDirectoryReader,
4
- VectorStoreIndex,
5
- ServiceContext,
6
- )
7
- from llama_index.embeddings.huggingface import HuggingFaceEmbedding
8
- from llama_index.llms import HuggingFacePipeline
9
-
10
- from transformers import pipeline
11
-
12
- # --- Configure HuggingFace pipeline ---
13
- hf_model_name = "google/flan-t5-small" # small, fast text2text model
14
- pipe = pipeline("text2text-generation", model=hf_model_name)
15
- llm = HuggingFacePipeline(pipeline=pipe)
16
-
17
- # Embedding model
18
- embed_model = HuggingFaceEmbedding(model_name="sentence-transformers/all-mpnet-base-v2")
19
-
20
- service_context = ServiceContext.from_defaults(llm=llm, embed_model=embed_model)
21
-
22
- index = None
23
-
24
- # --- Functions ---
25
- def ingest_files(files):
26
- global index
27
- if not files:
28
- return "No files uploaded."
29
-
30
- tmp_dir = "uploaded_data"
31
- import os
32
- os.makedirs(tmp_dir, exist_ok=True)
33
-
34
- for f in files:
35
- dest = os.path.join(tmp_dir, f.name)
36
- with open(dest, "wb") as out_f:
37
- out_f.write(f.read())
38
-
39
- docs = SimpleDirectoryReader(tmp_dir).load_data()
40
- index = VectorStoreIndex.from_documents(docs, service_context=service_context)
41
- return f"Ingested {len(docs)} documents successfully."
42
-
43
- def answer_question(messages, user_message):
44
- global index
45
- if index is None:
46
- return "No documents indexed yet. Upload files first."
47
-
48
- query_engine = index.as_query_engine()
49
- resp = query_engine.query(user_message)
50
- return resp.response
51
-
52
- # --- Gradio UI ---
53
- with gr.Blocks(title="HuggingFace LlamaIndex Chat") as demo:
54
- gr.Markdown("<h2 style='text-align:center;color:#4A90E2;'>Upload + HuggingFace LlamaIndex Chatbot</h2>")
55
-
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
56
  with gr.Row():
57
- with gr.Column(scale=1):
58
- file_input = gr.File(
59
- file_types=[".txt", ".pdf", ".md", ".csv"],
60
- file_count="multiple",
61
- label="Upload files"
62
- )
63
- ingest_btn = gr.Button("Ingest / Index")
64
- status = gr.Textbox(label="Status", interactive=False)
65
  with gr.Column(scale=2):
66
- chatbot = gr.Chatbot(label="Chat with your docs", height=400)
67
- user_input = gr.Textbox(label="Ask a question")
68
- send_btn = gr.Button("Send")
69
-
70
- ingest_btn.click(ingest_files, inputs=file_input, outputs=status)
71
- send_btn.click(answer_question, inputs=[chatbot, user_input], outputs=chatbot)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
72
 
73
  if __name__ == "__main__":
74
- demo.launch()
 
 
1
+ # app.py
2
+ import os
3
+ import threading
4
+ import tempfile
5
+ import json
6
+ import datetime
7
+ from pathlib import Path
8
+ import requests
9
+
10
  import gradio as gr
11
+ from fastapi import FastAPI, UploadFile, File
12
+ from fastapi.responses import PlainTextResponse
13
+ import uvicorn
14
+ from reportlab.lib.pagesizes import letter
15
+ from reportlab.pdfgen import canvas
16
+
17
+ # -----------------------
18
+ # CONFIG
19
+ # -----------------------
20
+ REPORT_DIR = Path("./reports")
21
+ REPORT_DIR.mkdir(exist_ok=True)
22
+
23
+ # MCP ports
24
+ FILE_SERVER_PORT = 8001
25
+ EXECUTOR_PORT = 8002
26
+ MEMORY_PORT = 8003
27
+
28
+ # -----------------------
29
+ # MCP SERVERS
30
+ # -----------------------
31
+
32
+ # --- File Server ---
33
+ file_app = FastAPI()
34
+ FILE_STORAGE = Path("file_storage")
35
+ FILE_STORAGE.mkdir(exist_ok=True)
36
+
37
@file_app.post("/upload")
async def upload_file(file: UploadFile = File(...)):
    """Store an uploaded file inside FILE_STORAGE and report its path.

    The client-supplied filename is reduced to its basename so a crafted
    name such as "../../etc/passwd" cannot escape the storage directory.
    Returns {"status", "filename", "path"} on success.
    """
    # Path(...).name strips any directory components from the client name.
    safe_name = Path(file.filename or "unnamed").name
    dest = FILE_STORAGE / safe_name
    content = await file.read()
    with open(dest, "wb") as f:
        f.write(content)
    return {"status": "ok", "filename": safe_name, "path": str(dest)}
44
+
45
@file_app.get("/list")
def list_files():
    """Return the names of all regular files currently in FILE_STORAGE."""
    names = []
    for entry in FILE_STORAGE.iterdir():
        if entry.is_file():
            names.append(entry.name)
    return {"files": names}
48
+
49
@file_app.get("/read/{name}")
def read_file(name: str):
    """Return the UTF-8 text content of a stored file, or an error dict.

    Always answers with JSON: callers (see analyze_and_plan) look for an
    "error" key, so failures must not surface as unhandled 500s.
    """
    # Basename-only lookup — defense in depth against crafted names.
    fpath = FILE_STORAGE / Path(name).name
    if not fpath.exists():
        return {"error": "not found"}
    try:
        return {"filename": name, "content": fpath.read_text(encoding="utf-8")}
    except UnicodeDecodeError:
        # A binary upload previously crashed this endpoint with a 500.
        return {"error": "file is not valid UTF-8 text"}
55
+
56
+ # --- Executor Server ---
57
+ executor_app = FastAPI()
58
+ EXECUTOR_DB = Path("executor_db")
59
+ EXECUTOR_DB.mkdir(exist_ok=True)
60
+
61
@executor_app.post("/apply")
def apply_patch(req: dict):
    """Append a patch record for req["filename"] to its JSON patch log.

    Request body: {"filename": str, "action": str, "params": dict}.
    Returns {"status": "applied", "record": ...} so the caller can confirm
    exactly what was logged.
    """
    rec = {
        "filename": req.get("filename"),
        "action": req.get("action"),
        "params": req.get("params", {}),
        "timestamp": datetime.datetime.utcnow().isoformat(),
    }
    # Basename-only so a crafted filename cannot write outside EXECUTOR_DB;
    # fall back to "unknown" when no filename was supplied (previously this
    # produced a literal "None.patchlog.json").
    safe = Path(str(rec["filename"] or "unknown")).name
    fname = EXECUTOR_DB / f"{safe}.patchlog.json"
    existing = []
    if fname.exists():
        try:
            existing = json.loads(fname.read_text())
        except (json.JSONDecodeError, OSError):
            # A corrupted log must not block new records; start a fresh list.
            existing = []
    existing.append(rec)
    fname.write_text(json.dumps(existing, indent=2))
    return {"status": "applied", "record": rec}
76
+
77
+ # --- Memory Server ---
78
+ memory_app = FastAPI()
79
+ MEMORY_DB = Path("memory_db")
80
+ MEMORY_DB.mkdir(exist_ok=True)
81
+ MEMORY_FILE = MEMORY_DB / "memory.json"
82
+ if not MEMORY_FILE.exists():
83
+ MEMORY_FILE.write_text(json.dumps({}))
84
+
85
@memory_app.post("/set")
def set_memory(item: dict):
    """Store item["value"] under item["key"] in the JSON memory file.

    Rejects requests without a key; previously a missing key was silently
    persisted under the JSON literal null key.
    """
    key = item.get("key")
    if key is None:
        return {"status": "error", "detail": "missing key"}
    value = item.get("value")
    m = json.loads(MEMORY_FILE.read_text())
    m[key] = value
    # NOTE(review): read-modify-write is not atomic; concurrent /set calls
    # can lose updates — acceptable for this single-user demo, confirm if
    # multiple clients are expected.
    MEMORY_FILE.write_text(json.dumps(m, indent=2))
    return {"status": "ok", "key": key}
93
+
94
@memory_app.get("/get/{key}")
def get_memory(key: str):
    """Look up *key* in the JSON memory file; value is None when absent."""
    stored = json.loads(MEMORY_FILE.read_text())
    return {"value": stored.get(key)}
98
+
99
+ # -----------------------
100
+ # HELPER FUNCTIONS
101
+ # -----------------------
102
def upload_file_to_mcp(filepath, filename):
    """POST a local file to the MCP file server; return its JSON reply.

    Raises requests.HTTPError on a non-2xx response.
    """
    endpoint = f"http://localhost:{FILE_SERVER_PORT}/upload"
    with open(filepath, "rb") as fh:
        resp = requests.post(endpoint, files={"file": (filename, fh)}, timeout=30)
        resp.raise_for_status()
        return resp.json()
109
+
110
def list_mcp_files():
    """Fetch {"files": [...]} from the MCP file server.

    A timeout is set (matching upload_file_to_mcp) so a dead or still-booting
    server cannot hang the Gradio UI forever.
    """
    return requests.get(f"http://localhost:{FILE_SERVER_PORT}/list", timeout=30).json()
112
+
113
def read_mcp_file(name):
    """Fetch one stored file's content (or an {"error": ...} dict) by name.

    A timeout prevents a hung server from blocking the UI indefinitely.
    """
    return requests.get(f"http://localhost:{FILE_SERVER_PORT}/read/{name}", timeout=30).json()
115
+
116
def apply_patch_mcp(filename, action, params=None):
    """Record a patch action on the executor server; return its JSON reply.

    *params* defaults to an empty dict. A timeout prevents a hung server
    from blocking the caller indefinitely.
    """
    payload = {"filename": filename, "action": action, "params": params or {}}
    return requests.post(f"http://localhost:{EXECUTOR_PORT}/apply", json=payload, timeout=30).json()
119
+
120
def memory_set(key, value):
    """Persist *value* under *key* on the memory server (with a timeout)."""
    return requests.post(
        f"http://localhost:{MEMORY_PORT}/set",
        json={"key": key, "value": value},
        timeout=30,
    ).json()
122
+
123
def memory_get(key):
    """Read {"value": ...} for *key* from the memory server (with a timeout)."""
    return requests.get(f"http://localhost:{MEMORY_PORT}/get/{key}", timeout=30).json()
125
+
126
+ # -----------------------
127
+ # LOG ANALYSIS & PLAN
128
+ # -----------------------
129
def parse_log_text(txt):
    """Split a raw log dump into error lines, warning lines, and a count.

    A line containing both an error marker and a warning marker is reported
    in both lists, matching a simple substring scan.
    """
    errors, warnings = [], []
    all_lines = txt.splitlines()
    for line in all_lines:
        if any(tag in line for tag in ("ERROR", "CRITICAL", "FATAL")):
            errors.append(line)
        if "WARNING" in line or "WARN" in line:
            warnings.append(line)
    return {"errors": errors, "warnings": warnings, "total_lines": len(all_lines)}
134
+
135
def analyze_and_plan(filename):
    """Read *filename* from the MCP file server and build an analysis dict.

    Returns the keys summary, issues, suggestions, plan_text. A missing
    file yields an empty-but-well-formed result rather than raising.
    """
    record = read_mcp_file(filename)
    if "error" in record:
        return {"summary": "File not found", "issues": [], "suggestions": [], "plan_text": ""}
    parsed = parse_log_text(record.get("content", ""))
    n_err = len(parsed["errors"])
    n_warn = len(parsed["warnings"])
    return {
        "summary": f"Detected {n_err} errors and {n_warn} warnings",
        # Cap each category at 10 lines to keep the UI readable.
        "issues": parsed["errors"][:10] + parsed["warnings"][:10],
        "suggestions": [
            "Investigate top errors",
            "Check system configuration",
            "Add proper exception handling",
        ],
        "plan_text": "STEP 1: Review errors\nSTEP 2: Apply suggested fixes\nSTEP 3: Re-run analysis",
    }
154
+
155
def generate_incident_report(filename, summary, issues, suggestions):
    """Render a PDF incident report into REPORT_DIR and return its path.

    Fixes: *filename* was previously ignored — the literal "(unknown)"
    appeared in both the PDF title and the output file name. Also re-sets
    the font after a page break, since ReportLab's showPage() resets the
    canvas graphics state to its defaults.
    """
    # Basename-only so the report stays inside REPORT_DIR.
    safe = Path(filename).name
    now = datetime.datetime.utcnow().isoformat().replace(":", "-")
    out = REPORT_DIR / f"incident_{safe}_{now}.pdf"
    c = canvas.Canvas(str(out), pagesize=letter)
    c.setFont("Helvetica-Bold", 16)
    c.drawString(40, 750, f"Incident Report - {safe}")
    c.setFont("Helvetica", 10)
    c.drawString(40, 730, f"Generated: {datetime.datetime.utcnow().isoformat()}")
    y = 700
    c.drawString(40, y, "Summary:")
    c.drawString(60, y - 20, summary[:1000])
    y -= 60
    c.drawString(40, y, "Issues:")
    for issue in issues:
        y -= 14
        c.drawString(60, y, issue[:120])
        if y < 80:
            c.showPage()
            c.setFont("Helvetica", 10)  # showPage() resets the font
            y = 740
    y -= 20
    c.drawString(40, y, "Suggestions:")
    for sug in suggestions:
        y -= 14
        c.drawString(60, y, f"- {sug[:100]}")
        if y < 80:
            c.showPage()
            c.setFont("Helvetica", 10)  # showPage() resets the font
            y = 740
    c.save()
    return str(out)
182
+
183
def apply_fix_action(filename, action_key):
    """Translate a UI action key into an executor patch and record it.

    Unknown keys fall through to a "custom_action" carrying the key as a
    note. Fixes: the memory key previously contained the literal
    "(unknown)" instead of the target filename, so patches on different
    files clobbered the same memo.
    """
    mapping = {
        "increase_timeout": ("update_timeout", {"timeout": 60}),
        "increase_retries": ("increase_retries", {"retries": 3}),
        "mark_fixed": ("mark_fixed", {}),
    }
    action, params = mapping.get(action_key, ("custom_action", {"note": action_key}))
    res = apply_patch_mcp(filename, action, params)
    # Key the last-patch memo by filename so each file keeps its own record.
    memory_set(f"last_patch_{filename}", json.dumps(res))
    return res
193
+
194
+ # -----------------------
195
+ # GRADIO UI
196
+ # -----------------------
197
with gr.Blocks() as demo:
    gr.Markdown("## AgentOps MCP - Log Analysis & Agent Planning")
    with gr.Row():
        with gr.Column(scale=2):
            upload = gr.File(label="Upload log file")
            files_list = gr.Dropdown(choices=[], label="Files on MCP Server", interactive=True)
            btn_refresh = gr.Button("Refresh files")
            btn_analyze = gr.Button("Analyze selected file")
            apply_dropdown = gr.Dropdown(
                choices=["increase_timeout", "increase_retries", "mark_fixed"],
                label="Select fix action",
            )
            btn_apply = gr.Button("Apply fix")
        with gr.Column(scale=3):
            summary_box = gr.Textbox(label="Agent Summary", lines=6)
            issues_box = gr.Textbox(label="Detected Issues", lines=10)
            suggestions_box = gr.Textbox(label="Suggested Fixes", lines=6)
            plan_box = gr.Textbox(label="Agent Plan / Reasoning", lines=6)
            report_download = gr.File(label="Last Report (download)")

    def refresh_files():
        """Dropdown choices = files currently stored on the MCP file server."""
        resp = list_mcp_files()
        return resp.get("files", [])

    btn_refresh.click(fn=refresh_files, outputs=files_list)

    def on_upload(file_obj):
        """Forward a freshly uploaded file to the MCP file server.

        Gradio has already saved the upload to a temp file: ``file_obj.name``
        (or the value itself, for filepath-typed components) is that path.
        The previous code joined that absolute path onto tempdir and
        re-opened the SAME path for writing, truncating the upload before
        reading it back — send the existing temp file directly instead.
        """
        if file_obj is None:
            return refresh_files()
        src = Path(getattr(file_obj, "name", file_obj))
        try:
            upload_file_to_mcp(str(src), src.name)
        except Exception as e:
            # Best-effort: log to console, keep the UI responsive.
            print("Upload error:", e)
        return refresh_files()

    upload.upload(fn=on_upload, inputs=upload, outputs=files_list)

    def analyze_selected(fname):
        """Analyze a server-side file, build the PDF report, fill the UI."""
        if not fname:
            return "Select file first", "", "", "", None
        out = analyze_and_plan(fname)
        report_path = generate_incident_report(
            fname, out["summary"], out["issues"], out["suggestions"]
        )
        # Return the plain path: gr.File.update() was removed in Gradio 4;
        # a raw filepath value works on both Gradio 3 and 4.
        return (
            out["summary"],
            "\n".join(out["issues"]),
            "\n".join(out["suggestions"]),
            out["plan_text"],
            report_path,
        )

    btn_analyze.click(
        fn=analyze_selected,
        inputs=[files_list],
        outputs=[summary_box, issues_box, suggestions_box, plan_box, report_download],
    )

    def apply_action(fname, action_key):
        """Apply the chosen fix action and show the executor's record."""
        if not fname:
            return "Select file first"
        res = apply_fix_action(fname, action_key)
        return json.dumps(res, indent=2)

    btn_apply.click(fn=apply_action, inputs=[files_list, apply_dropdown], outputs=[plan_box])
250
+
251
+ # -----------------------
252
+ # START MCP SERVERS
253
+ # -----------------------
254
# Each MCP service runs its own uvicorn instance in a daemon thread so the
# Gradio app on the main thread owns the process lifetime (daemon threads
# die with it).
def start_file_server():
    uvicorn.run(file_app, host="0.0.0.0", port=FILE_SERVER_PORT, log_level="error")
def start_executor_server():
    uvicorn.run(executor_app, host="0.0.0.0", port=EXECUTOR_PORT, log_level="error")
def start_memory_server():
    uvicorn.run(memory_app, host="0.0.0.0", port=MEMORY_PORT, log_level="error")

# NOTE(review): servers start at import time with no readiness check — the
# UI can fire requests before the ports are bound, so the first clicks may
# fail. Confirm whether a startup wait/health-check is needed.
threading.Thread(target=start_file_server, daemon=True).start()
threading.Thread(target=start_executor_server, daemon=True).start()
threading.Thread(target=start_memory_server, daemon=True).start()
264
 
265
if __name__ == "__main__":
    # Bind to all interfaces on port 7860 — the Hugging Face Spaces default.
    demo.launch(server_name="0.0.0.0", server_port=7860)