S-Dreamer committed on
Commit 866d1e1 · verified · 1 Parent(s): 087d6c0

Update app.py

Files changed (1)
  1. app.py +112 -53
app.py CHANGED
@@ -1,70 +1,129 @@
+#!/usr/bin/env python
+"""
+Parrot OSINT MCP – Gradio Frontend
+
+Modes:
+- "OSINT Dashboard" (multi-tool, opinionated)
+- "MCP Bridge" (raw tool_name + JSON args → JSON result)
+- "Analyst Copilot" (streaming LLM with OSINT context injection)
+"""
+
+import json
+import traceback
+from typing import Any, Dict
+
 import gradio as gr
 from huggingface_hub import InferenceClient
 
+# ---------------------------------------------------------------------
+# Task registry: adapt this to your actual task API
+# ---------------------------------------------------------------------
+
+TASK_REGISTRY: Dict[str, Any] = {}
+
+def _register_tasks() -> None:
+    def _try_register(name: str, module_name: str):
+        try:
+            module = __import__(f"tasks.{module_name}", fromlist=["*"])
+            fn = getattr(module, "run", None)
+            if callable(fn):
+                TASK_REGISTRY[name] = fn
+        except Exception:
+            pass
+
+    _try_register("lookup_ip", "lookup_ip")
+    _try_register("lookup_domain", "lookup_domain")
+    _try_register("lookup_hash", "lookup_hash")
+    _try_register("correlate_iocs", "correlate_iocs")
+    _try_register("generate_report", "generate_report")
+    _try_register("enrich_entity", "enrich_entity")
+    _try_register("mitre_map", "mitre_map")
+    _try_register("quickscan", "quickscan")
+
+_register_tasks()
+
+
+# ---------------------------------------------------------------------
+# Core execution helpers
+# ---------------------------------------------------------------------
+
+def call_task(tool_name: str, payload: Dict[str, Any]) -> Dict[str, Any]:
+    fn = TASK_REGISTRY.get(tool_name)
+    if not fn:
+        return {
+            "error": f"Unknown tool '{tool_name}'. Registered tools: {sorted(TASK_REGISTRY.keys())}"
+        }
+
+    try:
+        result = fn(**payload)
+        if not isinstance(result, dict):
+            result = {"result": result}
+        return result
+    except Exception as exc:
+        return {
+            "error": f"Exception in tool '{tool_name}': {exc}",
+            "traceback": traceback.format_exc(),
+        }
+
+
+def format_result_for_ui(result: Dict[str, Any]) -> Dict[str, str]:
+    pretty_json = json.dumps(result, indent=2, default=str)
 
-def respond(
-    message,
-    history: list[dict[str, str]],
-    system_message,
-    max_tokens,
-    temperature,
-    top_p,
-    hf_token: gr.OAuthToken,
-):
+    markdown = result.get("markdown") or result.get("report") or ""
+    if not markdown and "summary" in result:
+        markdown = f"## Summary\n\n{result['summary']}"
+
+    mitre = json.dumps(result.get("mitre", ""), indent=2, default=str) if result.get("mitre") else ""
+    stix = json.dumps(result.get("stix", ""), indent=2, default=str) if result.get("stix") else ""
+    sarif = json.dumps(result.get("sarif", ""), indent=2, default=str) if result.get("sarif") else ""
+
+    return {
+        "summary": result.get("summary", ""),
+        "markdown": markdown,
+        "json": pretty_json,
+        "mitre": mitre,
+        "stix": stix,
+        "sarif": sarif,
+    }
+
+
+# ---------------------------------------------------------------------
+# MODE C — ANALYST COPILOT (LLM)
+# ---------------------------------------------------------------------
+
+def respond(message, history, system_message, model, hf_token, temperature, top_p, max_tokens):
     """
-    For more information on `huggingface_hub` Inference API support, please check the docs: https://huggingface.co/docs/huggingface_hub/v0.22.2/en/guides/inference
+    Streaming LLM response using HuggingFace InferenceClient.
+    Supports injecting OSINT task results into the conversation.
     """
-    client = InferenceClient(token=hf_token.token, model="openai/gpt-oss-20b")
+    client = InferenceClient(
+        token=hf_token.token,
+        model=model,
+    )
 
     messages = [{"role": "system", "content": system_message}]
-
     messages.extend(history)
-
     messages.append({"role": "user", "content": message})
 
-    response = ""
+    response_text = ""
 
-    for message in client.chat_completion(
-        messages,
-        max_tokens=max_tokens,
-        stream=True,
+    for chunk in client.chat_completion(
+        messages=messages,
         temperature=temperature,
         top_p=top_p,
+        max_tokens=max_tokens,
+        stream=True
     ):
-        choices = message.choices
-        token = ""
-        if len(choices) and choices[0].delta.content:
-            token = choices[0].delta.content
+        delta = chunk.choices[0].delta.content
+        if delta:
+            response_text += delta
+            yield response_text
 
-        response += token
-        yield response
 
-
-"""
-For information on how to customize the ChatInterface, peruse the gradio docs: https://www.gradio.app/docs/chatinterface
-"""
-chatbot = gr.ChatInterface(
-    respond,
-    type="messages",
-    additional_inputs=[
-        gr.Textbox(value="You are a friendly Chatbot.", label="System message"),
-        gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
-        gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
-        gr.Slider(
-            minimum=0.1,
-            maximum=1.0,
-            value=0.95,
-            step=0.05,
-            label="Top-p (nucleus sampling)",
-        ),
-    ],
-)
-
-with gr.Blocks() as demo:
-    with gr.Sidebar():
-        gr.LoginButton()
-    chatbot.render()
-
-
-if __name__ == "__main__":
-    demo.launch()
+def inject_osint_context(history, task_result: Dict[str, Any]):
+    """
+    Inject JSON + summary + MITRE mappings directly into the chat history.
+    """
+    pretty = json.dumps(task_result, indent=2, default=str)
+    blob = f"""
+### OSINT Result Injected:
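
The pattern this commit introduces: call_task() looks a tool up in TASK_REGISTRY and normalizes every outcome (unknown tool, raised exception, non-dict return) into a plain dict, which format_result_for_ui() then flattens into UI-ready strings. A minimal usage sketch of that contract, assuming the definitions above are importable from app.py; the "ip" payload key is an assumption for illustration, since each tasks.*.run() defines its own keyword arguments:

    from app import call_task, format_result_for_ui

    # Hypothetical driver for the registry/dispatch pattern above.
    # The payload shape is illustrative, not taken from the commit.
    result = call_task("lookup_ip", {"ip": "203.0.113.7"})

    if "error" in result:
        print(result["error"])        # unknown tool or task failure
    else:
        fields = format_result_for_ui(result)
        print(fields["summary"])      # short text for the dashboard
        print(fields["json"])         # pretty-printed raw result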