import gradio as gr
from huggingface_hub import InferenceClient
# System prompt substituted for the user-supplied one whenever "DFIR Mode"
# is enabled (see respond): steers the model toward precise digital-forensics
# / incident-response analysis and away from speculation.
DFIR_SYSTEM_MESSAGE = (
    "You are a DFIR and OSINT-focused assistant specializing in "
    "digital forensics, incident response, malware analysis, "
    "and investigative reasoning. Be precise, analytical, "
    "methodical, and professional. Avoid speculation."
)
def respond(
    message,
    history: list[dict[str, str]],
    dfir_mode: bool,
    system_message,
    max_tokens,
    temperature,
    top_p,
    hf_token: gr.OAuthToken,
):
    """Stream a chat completion from the Hugging Face Inference API.

    Args:
        message: Latest user message.
        history: Prior turns as OpenAI-style ``{"role", "content"}`` dicts.
        dfir_mode: When True, override ``system_message`` / ``temperature`` /
            ``top_p`` with the DFIR preset for low-speculation output.
        system_message: System prompt (ignored when ``dfir_mode`` is True).
        max_tokens: Cap on the number of generated tokens.
        temperature: Sampling temperature.
        top_p: Nucleus-sampling threshold.
        hf_token: OAuth token injected by Gradio's login flow; ``None``
            when the visitor has not signed in.

    Yields:
        The accumulated response text after each streamed chunk.

    Raises:
        gr.Error: If the visitor is not signed in (no usable OAuth token).
    """
    # Fail fast with a user-visible message instead of an AttributeError
    # on hf_token.token when the visitor skipped the sidebar LoginButton.
    if hf_token is None or not getattr(hf_token, "token", None):
        raise gr.Error("Please sign in with your Hugging Face account first.")

    if dfir_mode:
        # Forensic preset: fixed analytical prompt plus lower temperature
        # for more deterministic, precise answers.
        system_message = DFIR_SYSTEM_MESSAGE
        temperature = 0.3
        top_p = 0.9

    client = InferenceClient(
        model="mistralai/Mistral-7B-Instruct-v0.2",
        token=hf_token.token,
    )

    # Build the conversation: system prompt, then prior turns, then the
    # new user message.
    messages = [{"role": "system", "content": system_message}]
    messages.extend(history)
    messages.append({"role": "user", "content": message})

    response = ""
    for chunk in client.chat_completion(
        messages=messages,
        max_tokens=max_tokens,
        stream=True,
        temperature=temperature,
        top_p=top_p,
    ):
        # Some stream chunks carry no delta text (e.g. the final stop
        # chunk); skip those rather than concatenating None.
        if chunk.choices and chunk.choices[0].delta.content:
            response += chunk.choices[0].delta.content
            yield response
# Extra controls shown beneath the chat box. Their order must match the
# extra parameters of respond(): dfir_mode, system_message, max_tokens,
# temperature, top_p.
_extra_controls = [
    gr.Checkbox(
        value=True,
        label="DFIR Mode (forensic / analytical)",
    ),
    gr.Textbox(
        value="You are a helpful assistant.",
        label="System message (ignored when DFIR Mode is ON)",
    ),
    gr.Slider(1, 2048, value=512, step=1, label="Max new tokens"),
    gr.Slider(0.1, 1.5, value=0.7, step=0.1, label="Temperature"),
    gr.Slider(0.1, 1.0, value=0.95, step=0.05, label="Top-p"),
]

# Chat UI wired to the streaming respond() generator, using the
# OpenAI-style "messages" history format.
chatbot = gr.ChatInterface(
    respond,
    type="messages",
    additional_inputs=_extra_controls,
)
# Page layout: a sidebar holding the OAuth login button, with the chat
# interface rendered in the main area.
with gr.Blocks() as demo:
    with gr.Sidebar():
        # Sign-in is required: respond() reads hf_token.token for inference.
        gr.LoginButton()
    chatbot.render()
# Script entry point — launch the Gradio server when run directly.
if __name__ == "__main__":
    demo.launch()