iarfmoose3 committed on
Commit
4667719
·
verified ·
1 Parent(s): 81c352d

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +111 -59
app.py CHANGED
@@ -1,64 +1,116 @@
1
  import gradio as gr
2
- from huggingface_hub import InferenceClient
3
-
4
- """
5
- For more information on `huggingface_hub` Inference API support, please check the docs: https://huggingface.co/docs/huggingface_hub/v0.22.2/en/guides/inference
6
- """
7
- client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
8
-
9
-
10
def respond(
    message,
    history: list[tuple[str, str]],
    system_message,
    max_tokens,
    temperature,
    top_p,
):
    """Stream a chat completion for *message* given the prior conversation.

    Args:
        message: The new user message.
        history: Prior (user, assistant) turn pairs; either side may be empty.
        system_message: System prompt placed first in the message list.
        max_tokens: Maximum number of new tokens to generate.
        temperature: Sampling temperature.
        top_p: Nucleus-sampling probability mass.

    Yields:
        The partial assistant response, growing token by token.
    """
    messages = [{"role": "system", "content": system_message}]

    # Flatten the paired history into role-tagged messages, skipping blanks.
    for user_turn, assistant_turn in history:
        if user_turn:
            messages.append({"role": "user", "content": user_turn})
        if assistant_turn:
            messages.append({"role": "assistant", "content": assistant_turn})

    messages.append({"role": "user", "content": message})

    response = ""

    # Use a distinct loop variable: the original shadowed the `message`
    # parameter with each streamed chunk.
    for chunk in client.chat_completion(
        messages,
        max_tokens=max_tokens,
        stream=True,
        temperature=temperature,
        top_p=top_p,
    ):
        # The final streamed chunk can carry delta.content == None, which
        # previously raised TypeError on `response += token`.
        token = chunk.choices[0].delta.content or ""
        response += token
        yield response
41
-
42
-
43
- """
44
- For information on how to customize the ChatInterface, peruse the gradio docs: https://www.gradio.app/docs/chatinterface
45
- """
46
# Build the tunable generation controls up front, then hand them to the
# ChatInterface. See https://www.gradio.app/docs/chatinterface for options.
_system_prompt_box = gr.Textbox(value="You are a friendly Chatbot.", label="System message")
_max_tokens_slider = gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens")
_temperature_slider = gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature")
_top_p_slider = gr.Slider(
    minimum=0.1,
    maximum=1.0,
    value=0.95,
    step=0.05,
    label="Top-p (nucleus sampling)",
)

demo = gr.ChatInterface(
    respond,
    additional_inputs=[
        _system_prompt_box,
        _max_tokens_slider,
        _temperature_slider,
        _top_p_slider,
    ],
)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
61
 
 
62
 
63
  if __name__ == "__main__":
64
- demo.launch()
 
 
 
 
1
  import gradio as gr
2
+ import logging
3
+ import json
4
+ from together import Together
5
+
6
# ----------------------------------------------------------------------------
# Configuration & Constants
# ----------------------------------------------------------------------------
# Together-hosted chat model used for all completions.
MODEL_NAME = "meta-llama/Llama-4-Maverick-17B-128E-Instruct-FP8"
# System prompt inserted as the first message of every conversation.
SYSTEM_PROMPT = (
    "You are CyberGuard, a senior-level cybersecurity expert assistant. "
    "You autonomously enforce security best practices, making informed decisions when rule-based policies fail."
)
# On-disk persistence for the conversation (relative to the working directory).
HISTORY_FILE = "conversation_history.json"

# ----------------------------------------------------------------------------
# Setup Logging
# ----------------------------------------------------------------------------
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(levelname)s - %(message)s'
)
logger = logging.getLogger(__name__)
24
+
25
+ # ----------------------------------------------------------------------------
26
+ # Conversation Persistence Utilities
27
+ # ----------------------------------------------------------------------------
28
def load_history(filepath: str) -> list:
    """Load conversation history from a JSON file.

    Args:
        filepath: Path of the JSON history file.

    Returns:
        The list of message dicts stored on disk, or an empty list when the
        file is missing, not valid JSON, or does not contain a list.
    """
    log = logging.getLogger(__name__)
    try:
        # Explicit encoding: the platform default is not UTF-8 everywhere.
        with open(filepath, 'r', encoding='utf-8') as f:
            history = json.load(f)
    except FileNotFoundError:
        log.info("No existing history found, starting fresh.")
        return []
    except json.JSONDecodeError:
        # A corrupt or partially-written file previously crashed the app at
        # startup; treat it as "no history" instead of failing to launch.
        log.warning("History file %s is not valid JSON; starting fresh.", filepath)
        return []
    if not isinstance(history, list):
        log.warning("History file %s did not contain a list; starting fresh.", filepath)
        return []
    log.info("Loaded existing conversation history.")
    return history
38
+
39
+
40
def save_history(history: list, filepath: str) -> None:
    """Persist conversation history to disk as indented JSON.

    Args:
        history: List of {'role': ..., 'content': ...} message dicts.
        filepath: Destination path; the file is overwritten in full.
    """
    # Explicit UTF-8 so reads and writes round-trip regardless of the
    # platform's default encoding (e.g. cp1252 on Windows).
    with open(filepath, 'w', encoding='utf-8') as f:
        json.dump(history, f, indent=2)
    logging.getLogger(__name__).info("Conversation history saved.")
45
+
46
# ----------------------------------------------------------------------------
# Together Client Initialization
# ----------------------------------------------------------------------------
# NOTE(review): Together() is constructed with no arguments — presumably it
# reads TOGETHER_API_KEY from the environment; confirm the deployment sets it.
together_client = Together()
50
+
51
+ # ----------------------------------------------------------------------------
52
+ # Core Chat Functionality
53
+ # ----------------------------------------------------------------------------
54
def append_and_stream(user_input: str, history: list):
    """Append the user's message to *history*, stream the LLM reply, and
    yield the growing history after each streamed token.

    Args:
        user_input: Raw text from the textbox.
        history: Mutable list of {'role', 'content'} dicts; mutated in place.

    Yields:
        The full history list (system message included) after each token is
        appended to the trailing assistant message.
    """
    # Frame as cybersecurity expert if not already.
    if not user_input.lower().startswith("as a cybersecurity expert"):
        user_input = f"(As a cybersecurity expert) {user_input}"

    # Ensure the system prompt leads the conversation exactly once.
    if not history or history[0].get('role') != 'system':
        history.insert(0, {'role': 'system', 'content': SYSTEM_PROMPT})

    # Persist the user turn immediately so it survives an API failure.
    history.append({'role': 'user', 'content': user_input})
    save_history(history, HISTORY_FILE)

    # Request the completion BEFORE appending the assistant placeholder:
    # the previous version included the empty assistant message in the
    # payload sent to the model.
    stream = together_client.chat.completions.create(
        model=MODEL_NAME,
        messages=history,
        stream=True
    )

    history.append({'role': 'assistant', 'content': ''})
    try:
        for chunk in stream:
            choices = getattr(chunk, 'choices', None)
            if not choices:
                # Guard both a missing attribute and an empty choices list
                # (the old code indexed choices[0] unconditionally).
                continue
            # The final streamed chunk can carry delta.content == None,
            # which previously raised TypeError on concatenation.
            delta = choices[0].delta.content or ''
            history[-1]['content'] += delta
            yield history
    finally:
        # Persist once per turn instead of rewriting the whole file on
        # every token (the old per-token save was O(tokens) disk writes).
        save_history(history, HISTORY_FILE)
85
+
86
+ # ----------------------------------------------------------------------------
87
+ # Gradio Interface Definition
88
+ # ----------------------------------------------------------------------------
89
def launch_interface():
    """Build and launch the CyberGuard Gradio UI.

    Fixes over the previous wiring:
    - The chatbot was seeded with (role, content) tuples, which is not the
      (user, assistant) pair format; use message-dict display instead.
    - ``state.change`` invoked the two-parameter ``append_and_stream`` with a
      single input, which would fail at runtime; the submit handler now calls
      it directly with both the textbox value and the history state.
    - The old ``on_submit`` never appended the user's message to history.
    """
    # Load previous history or start a new one.
    history = load_history(HISTORY_FILE)

    with gr.Blocks() as demo:
        gr.Markdown("## CyberGuard – Autonomous Cybersecurity Chat")
        # Display role/content message dicts directly; hide the system prompt.
        # NOTE(review): type="messages" requires a recent gradio version —
        # confirm against the pinned dependency.
        chatbot = gr.Chatbot(
            type="messages",
            value=[m for m in history if m['role'] != 'system'],
        )
        state = gr.State(history)

        txt = gr.Textbox(show_label=False, placeholder="Enter your security query...")

        def stream_reply(user_msg, hist):
            # append_and_stream mutates hist in place and yields the full
            # history; strip the system message before display.
            for updated in append_and_stream(user_msg, hist):
                yield [m for m in updated if m['role'] != 'system']

        # Stream the assistant reply into the chatbot, then clear the box.
        txt.submit(stream_reply, [txt, state], chatbot)
        txt.submit(lambda *_: "", None, txt)

    # NOTE(review): share=True plus binding 0.0.0.0 exposes the app publicly
    # with no authentication — confirm this is intended for the deployment.
    demo.launch(share=True, server_name='0.0.0.0', server_port=7860)
111
 
112
  if __name__ == "__main__":
113
+ try:
114
+ launch_interface()
115
+ except Exception as e:
116
+ logger.exception("Failed to launch CyberGuard chat interface.")