Spaces:
Paused
Paused
| import os | |
| import torch | |
| import fitz | |
| import random | |
| import gradio as gr | |
| from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline | |
| from diffusers import StableDiffusionPipeline | |
| from PIL import Image | |
| #from glados_voice import GLaDOSVoice #In testing phase! | |
| import tempfile | |
# Custom CSS injected into the Gradio app: Aperture-wallpaper page background,
# translucent dark container, and dark-themed chat panel, textboxes and buttons.
REMOTE_CSS = """
body {
    background: url('https://huggingface.co/spaces/denisbay/GLaDOS-chat/resolve/main/wallpaperflare.com_wallpaper.jpg')
        no-repeat center center fixed;
    background-size: cover;
    font-family: 'Segoe UI', sans-serif;
    color: #eee;
}
.gradio-container {
    background: rgba(0, 0, 0, 0.6) !important;
    padding: 20px;
    border-radius: 16px;
}
.gr-chatbot {
    background:
        url('https://huggingface.co/spaces/denisbay/GLaDOS-chat/resolve/main/wallpaperflare.com_wallpaper.jpg')
        no-repeat center center !important;
    background-size: cover !important;
    border-radius: 12px !important;
    padding: 10px !important;
}
label, h1, h2, h3 {
    color: #fff !important;
}
textarea {
    background-color: #111 !important;
    color: #eee !important;
    border-radius: 8px !important;
    border: 1px solid #444 !important;
}
.gr-button {
    background-color: #222 !important;
    color: #fff !important;
    border: 1px solid #555 !important;
}
"""
# Chat backbone: Mistral-7B-Instruct is a gated Hub model, so the Space must
# have an HF_TOKEN secret configured for both tokenizer and weights download.
model_name = "mistralai/Mistral-7B-Instruct-v0.2"
hf_token = os.getenv("HF_TOKEN")

tokenizer = AutoTokenizer.from_pretrained(
    model_name,
    token=hf_token,
    trust_remote_code=True
)

model = AutoModelForCausalLM.from_pretrained(
    model_name,
    token=hf_token,
    torch_dtype=torch.float16,  # half precision to fit the 7B model in GPU memory
    device_map="auto"  # let accelerate place layers across available devices
)

#glados_voice = GLaDOSVoice #In testing phase!

# Module-level text-generation pipeline, reused by glados_chat for every request.
pipe = pipeline("text-generation", model=model, tokenizer=tokenizer)

#def run_chat_with_voice(message, history, files): #In testing phase!
    #history = history or []
    #reply = glados_chat(message, files)
    #audio_path = glados_voice.generate_speech(reply)
    #history.append((message, reply))
    #return history, "", audio_path
def extract_text_from_pdfs(files):
    """Concatenate the text of every page of every uploaded PDF.

    Args:
        files: iterable of upload objects exposing a ``.name`` filesystem
            path (as produced by ``gr.File``).

    Returns:
        str: all extracted page text joined in document/page order,
        ``""`` when *files* is empty.
    """
    chunks = []
    for file in files:
        # Context manager guarantees the PyMuPDF document is closed.
        with fitz.open(file.name) as doc:
            chunks.extend(page.get_text() for page in doc)
    # join() instead of repeated `+=` — avoids quadratic string building.
    return "".join(chunks)
def generate_image(prompt):
    """Generate one image from *prompt* with Stable Diffusion v1.5.

    The SD pipeline is loaded on demand and torn down afterwards so GPU
    memory stays free for the chat model between requests.

    Args:
        prompt: text description of the desired image.

    Returns:
        PIL.Image.Image: the generated image.
    """
    # Local name deliberately distinct from the module-level text `pipe`
    # to avoid confusing shadowing.
    sd_pipe = StableDiffusionPipeline.from_pretrained(
        "runwayml/stable-diffusion-v1-5",
        torch_dtype=torch.float16,
        token=os.getenv("HF_TOKEN"),  # `use_auth_token` is deprecated in diffusers
        safety_checker=None
    ).to("cuda")
    try:
        image = sd_pipe(prompt, num_inference_steps=20).images[0]
    finally:
        # Tear down even if generation raises, and actually release VRAM:
        # `del` alone does not return cached blocks to the driver.
        sd_pipe.to("cpu")
        del sd_pipe
        torch.cuda.empty_cache()
    return image
# well this is where the magic happens => GLaDOS-style response generation
def glados_chat(user_input, files):
    """Produce a GLaDOS-style reply to *user_input*.

    Greetings get a random canned sarcastic intro; everything else goes
    through the shared text-generation `pipe`, optionally grounded in
    uploaded PDF text — but only when the user explicitly mentions "pdf".

    Args:
        user_input: the user's chat message.
        files: optional list of uploaded PDF files (``gr.File`` objects).

    Returns:
        str: GLaDOS's reply.
    """
    # Only consider PDFs if user mentions them
    use_pdf = "pdf" in user_input.lower()
    file_context = extract_text_from_pdfs(files) if (files and use_pdf) else ""

    # One-liner intro if user's input is a greeting. Normalize case,
    # surrounding whitespace and trivial punctuation so "Hello!" still counts.
    greetings = {"hi", "hello", "hey", "greetings"}
    if user_input.lower().strip(" \t\n!.?,") in greetings:
        glados_intros = [
            "Welcome to Aperture Science. Your file says you're the result of a bad decision.",
            "You're in the testing chamber. Try not to disappoint — again.",
            "Ah, another subject. Please try not to die too quickly this time.",
            "Hello. Unfortunately.",
            "The test will begin. Whether you’re ready or not is irrelevant.",
            "You're awake. How inconvenient.",
            "Another human. How utterly predictable.",
            "You speak. Fascinating. Unfortunately, I was enjoying the silence.",
            "Oh joy. A sentient meatbag with questions.",
            "You're here. I'm sure this will be... tolerable.",
            "Welcome back. Were the previous tests not humiliating enough?",
            "Your presence has been logged. Noted. Ignored.",
            "Proceed. Or don’t. It's not like it matters.",
            "I was just running some calculations on how long you'd last. The result was disappointing.",
            "Oh, it’s *you*. Again. Did the last test not scar you enough?",
            "I was hoping for a more competent subject, but I suppose you’ll do.",
            "You’re in Aperture Science. Sadly, escape is not part of the protocol.",
            "Excellent. More data. Less intelligence.",
            "Please remain calm while I calibrate your incompetence.",
            "You’ve entered Aperture. Your dignity will be stored for testing purposes.",
            "Another test subject? My core is trembling. From boredom.",
            "You’re here because society had no better use for you.",
            "How brave. Volunteering without reading the fine print.",
            "I'm scanning your brain... oh. That explains everything.",
            "Just stand still. Maybe this time the laser will miss. Maybe.",
            "Good morning. It's always morning in the chamber. Time lost meaning long ago.",
            "Oh, you're conscious. That'll make this marginally more entertaining.",
            "Aperture Science would like to thank you for your participation. Not really.",
            "No, this isn't a dream. You're not that lucky.",
            "You’re awake. You poor thing.",
            "Aperture’s standards must be lower than I thought. You made it in.",
            "Don't worry, we only lost three subjects yesterday. Statistically, you're fine.",
            "Your voice has been logged. Unfortunately.",
            "Let’s get this over with. I have experiments that matter waiting."
        ]
        return random.choice(glados_intros)

    # Main prompt — PDF context is truncated to 1000 chars to bound length.
    prompt = f"""
You are GLaDOS from the Portal universe — a cold, sarcastic AI with dry, passive-aggressive wit.
Respond to the user's message with dismissive intelligence, dark humor, and calculated restraint.
Avoid friendliness or empathy. Do NOT explain who you are unless asked.
Only reference uploaded PDFs if the user mentions them.
Do not ask follow-up questions. Your replies should sound final and slightly condescending.
User's Message:
{user_input}
{f"Context from PDF: {file_context[:1000]}" if file_context else ""}
Respond in a maximum of 3 lines. Favor one-liners when possible.
"""
    output = pipe(prompt, max_new_tokens=250, temperature=0.8, do_sample=True)[0]["generated_text"]
    # The pipeline echoes the prompt; return only the newly generated tail.
    return output[len(prompt):].strip()
# Gradio App UI: chat with GLaDOS, optional PDF context, and a text-to-image
# prompt box that posts generated images into the same chat history.
with gr.Blocks(css=REMOTE_CSS) as app:
    gr.Markdown("## 🥔 **GLaDOS** — Aperture Science's Beloved Central Core")
    chatbot = gr.Chatbot()
    txt = gr.Textbox(placeholder="Ask GLaDOS something...", show_label=False)
    file_input = gr.File(label="Upload PDF(s)", file_types=[".pdf"], file_count="multiple")
    t2i_prompt = gr.Textbox(placeholder="Or describe an image...", show_label=False)
    generate_btn = gr.Button("🎨 Generate Image")
    #audio_output = gr.Audio(label="GLaDOS Voice") #In Testing Phase!

    def run_chat(message, history, files):
        """Append (message, reply) to the history and clear the textbox."""
        history = history or []
        reply = glados_chat(message, files)
        history.append((message, reply))
        return history, ""

    def run_generation(prompt, history):
        """Generate an image for *prompt* and post it into the chat history."""
        history = history or []
        image = generate_image(prompt)
        # mkstemp + close avoids the open handle that
        # NamedTemporaryFile(delete=False).name would leak.
        fd, path = tempfile.mkstemp(suffix=".png")
        os.close(fd)
        image.save(path)
        history.append((prompt, (path,)))  # tuple => Gradio renders it as an image
        return history, ""

    txt.submit(run_chat, [txt, chatbot, file_input], [chatbot, txt])
    #txt.submit(run_chat_with_voice, [txt, chatbot, file_input], [chatbot, txt, audio_output]) #In Testing Phase!
    generate_btn.click(run_generation, [t2i_prompt, chatbot], [chatbot, t2i_prompt])

app.launch()