# NOTE: The original upload carried Hugging Face Spaces page chrome on these
# lines (runtime-error banner, commit hashes, file size, line-number gutter).
# That residue is not Python source and has been replaced by this comment.
import os
import gradio as gr
from together import Together
# ----------------------------------------------------------------------------
# Configuration & Constants
# ----------------------------------------------------------------------------
# Together-hosted model used for both chat streaming and summary generation.
MODEL_NAME: str = "meta-llama/Llama-4-Maverick-17B-128E-Instruct-FP8"
# System persona prepended to every model call; a running conversation summary
# is appended to this at request time (see assistant_stream).
SYSTEM_PROMPT_BASE: str = (
    "You are CyberGuard, a senior-level cybersecurity expert assistant. "
    "You autonomously enforce security best practices, making informed decisions when rule-based policies fail."
)
# ----------------------------------------------------------------------------
# Together Client Initialization with API Key
# ----------------------------------------------------------------------------
# Import-time side effect: reads TOGETHER_API_KEY and fails fast at startup
# rather than on the first request.
api_key = os.environ.get("TOGETHER_API_KEY")
if not api_key:
    raise ValueError("Missing TOGETHER_API_KEY environment variable")
together_client = Together(api_key=api_key)
# ----------------------------------------------------------------------------
# In-Memory Context Tracking
# ----------------------------------------------------------------------------
# NOTE(review): conversation_history is never read or written in this file —
# Gradio's Chatbot state is used instead; confirm before relying on it.
conversation_history: list = []  # List of (user_msg, assistant_msg)
# Running model-generated summary of the conversation, refreshed after each
# assistant reply; module-level mutable state, so single-session only.
context_summary: str = ""  # Running summary of conversation
# ----------------------------------------------------------------------------
# Helper: Update Context Summary
# ----------------------------------------------------------------------------
def update_summary(history):
    """Return a model-generated 3-5 bullet summary of *history*.

    Args:
        history: list of ``(user_msg, assistant_msg)`` pairs.

    Returns:
        The summary text, stripped of surrounding whitespace.
    """
    # Flatten the chat pairs into a plain transcript for the prompt.
    transcript = "\n".join(f"user: {u}\nassistant: {a}" for u, a in history)
    prompt = (
        "Summarize the following cybersecurity conversation in 3-5 bullet points, "
        "focusing on key decisions and context:\n" + transcript
    )
    messages = [
        {"role": "system", "content": SYSTEM_PROMPT_BASE},
        {"role": "user", "content": prompt},
    ]
    # Non-streaming call: we want the whole summary in one response object.
    response = together_client.chat.completions.create(
        model=MODEL_NAME,
        messages=messages,
        stream=False,
    )
    return response.choices[0].message.content.strip()
# ----------------------------------------------------------------------------
# Core Chat Functions for Gradio
# ----------------------------------------------------------------------------
def user_submit(user_input, history):
    """Record a user turn in the chat history.

    Frames the query as a cybersecurity question (unless the user already did)
    and appends a ``(user_msg, "")`` pair whose empty slot the assistant
    stream fills in afterwards.

    Args:
        user_input: raw textbox contents; may be None, empty, or whitespace.
        history: list of ``(user_msg, assistant_msg)`` pairs, or None.

    Returns:
        ``("", history)`` — the empty string clears the textbox component.
    """
    history = history or []
    # Strip first: whitespace-only input previously slipped past the
    # emptiness check and was submitted as a real turn.
    user_input = (user_input or "").strip()
    if not user_input:
        return "", history
    # Frame as cybersecurity expert unless the user already framed it.
    if not user_input.lower().startswith("as a cybersecurity expert"):
        user_input = f"(As a cybersecurity expert) {user_input}"
    # Placeholder assistant slot, filled token-by-token by assistant_stream.
    history.append((user_input, ""))
    return "", history
def assistant_stream(history):
    """Generator: stream the assistant's reply for the last turn in *history*.

    Yields the updated history after each received token so Gradio can render
    the reply incrementally, then refreshes the module-level context summary.

    Args:
        history: list of ``(user_msg, assistant_msg)`` pairs; the last pair's
            assistant slot is the placeholder being filled.

    Yields:
        The history list with the last assistant message progressively grown.
    """
    global context_summary
    if not history:
        return
    # Build messages for the model: system prompt (+ running summary), all
    # completed turns, then the pending user message.
    model_msgs = [{"role": "system", "content": SYSTEM_PROMPT_BASE}]
    if context_summary:
        model_msgs[0]["content"] += f"\n\nPrevious summary:\n{context_summary}"
    for user_msg, assistant_msg in history[:-1]:
        model_msgs.append({"role": "user", "content": user_msg})
        model_msgs.append({"role": "assistant", "content": assistant_msg})
    model_msgs.append({"role": "user", "content": history[-1][0]})
    stream = together_client.chat.completions.create(
        model=MODEL_NAME,
        messages=model_msgs,
        stream=True,
    )
    for chunk in stream:
        choices = getattr(chunk, "choices", None)
        if not choices:
            # Some stream events (e.g. usage-only chunks) carry no choices.
            continue
        # delta.content is None on role-announcement and final chunks; the
        # original code crashed with TypeError when concatenating it.
        delta = choices[0].delta.content or ""
        if delta:
            history[-1] = (history[-1][0], history[-1][1] + delta)
            yield history
    # Ensure the final state is emitted even if the last chunks were empty.
    yield history
    # After the full response, refresh the running conversation summary.
    context_summary = update_summary(history)
# ----------------------------------------------------------------------------
# Launch Gradio Interface
# ----------------------------------------------------------------------------
def launch_interface():
    """Build and launch the Gradio chat UI.

    Wires the textbox submit event to record the user turn, then chains the
    assistant streaming generator onto it. The original code called
    ``chatbot.stream(...)``, but ``gr.Chatbot`` has no ``stream`` event —
    that raised AttributeError at startup. Generators also require the
    request queue, so ``demo.queue()`` is enabled before launch.
    """
    with gr.Blocks() as demo:
        gr.Markdown("## CyberGuard – Autonomous Cybersecurity Chat")
        chatbot = gr.Chatbot()
        txt = gr.Textbox(show_label=False, placeholder="Enter your security query...")
        # user_submit returns "" for the textbox (clearing it) and the updated
        # history; .then() streams the assistant reply into the same Chatbot.
        # The former extra txt.submit(lambda: None, ...) clear was redundant.
        txt.submit(
            user_submit, [txt, chatbot], [txt, chatbot], queue=False
        ).then(assistant_stream, chatbot, chatbot)
    demo.queue()  # required for generator (streaming) event handlers
    demo.launch(share=True, server_name='0.0.0.0', server_port=7860)
if __name__ == "__main__":
    launch_interface()