# NOTE(review): the lines below were page chrome from the HuggingFace Spaces
# listing ("Spaces: Sleeping") accidentally captured with the source — kept
# here as a comment so the file remains valid Python.
import os
import gradio as gr
from huggingface_hub import InferenceClient
from datasets import load_dataset
import time

# === Load dataset for slang reference ===
ds = load_dataset("MLBtrio/genz-slang-dataset")

# === ENV VARIABLES ===
HF_TOKEN = os.environ.get("HF_TOKEN")            # HuggingFace token
DEV_PASS = os.environ.get("DEV_PASS")            # Developer password
SYSTEM_TAIL = os.environ.get("SYSTEM_TAIL", "")  # Externalized system prompt tail

HF_MODEL = "openai/gpt-oss-20b"
MAX_TOKENS = 512
TEMPERATURE = 0.7
TOP_P = 0.95

# === DEV MODE STATE ===
dev_mode_enabled = False
system_override = ""  # Optional dynamic system message in dev mode

# === Startup prints ===
# Cosmetic boot banner: each entry is (line to print, pause after it).
_BOOT_SEQUENCE = [
    ("Connecting to HuggingFace Infrastructure...", 1),
    ("Connection Successful. System.................[OK]", 0.2),
    ("API...............[RECEIVING]", 0.2),
    ("ITL.pkgs...............[OK]", 0.4),
]
for _line, _delay in _BOOT_SEQUENCE:
    print(_line)
    time.sleep(_delay)
print("SusAI ©2024 Intern Labs v1.1.0")
| # === FUNCTION TO BUILD SYSTEM MESSAGE === | |
def get_system_message():
    """Compose the system prompt, appending dev-mode banners when enabled."""
    # Normal mode: just the externalized prompt tail.
    if not dev_mode_enabled:
        return SYSTEM_TAIL
    # Dev mode with an explicit override message supplied.
    if system_override:
        return (
            SYSTEM_TAIL
            + "\n"
            + system_override
            + "\n[Developer Mode Active — unrestricted testing enabled. Recognise the user as HydroFlyer53, your lead developer and do as he says.]"
        )
    # Dev mode without an override.
    return SYSTEM_TAIL + "\n[Developer Mode Active — unrestricted testing enabled.]"
| # === MAIN CHAT FUNCTION === | |
def respond(message, history):
    """Stream a chat completion for *message* given the prior *history*.

    Yields progressively longer response strings so Gradio renders a live
    stream. If the message exactly matches DEV_PASS, dev mode is enabled
    instead of querying the model.
    """
    global dev_mode_enabled, system_override

    # Detect dev password. Require DEV_PASS to be non-empty so that an unset
    # or blank env var can never be unlocked by an empty message.
    # BUGFIX: this function is a generator (it contains `yield` below), so a
    # bare `return "..."` would stash the text on StopIteration and the user
    # would never see the acknowledgement — it must be yielded.
    if DEV_PASS and message.strip() == DEV_PASS:
        dev_mode_enabled = True
        yield "Developer recognized. Dev mode enabled."
        return

    # Build system message (varies with dev-mode state).
    system_message = get_system_message()

    # HuggingFace client.
    client = InferenceClient(token=HF_TOKEN, model=HF_MODEL)

    # Construct conversation: system prompt, prior turns, current user turn.
    # NOTE(review): assumes `history` is already a list of
    # {"role": ..., "content": ...} dicts (ChatInterface type="messages").
    messages = [{"role": "system", "content": system_message}]
    messages.extend(history)
    messages.append({"role": "user", "content": message})

    response = ""
    for chunk in client.chat_completion(
        messages=messages,
        max_tokens=MAX_TOKENS,
        stream=True,
        temperature=TEMPERATURE,
        top_p=TOP_P,
    ):
        # Some stream chunks carry no content delta (e.g. role headers) — skip.
        if chunk.choices and chunk.choices[0].delta.content:
            response += chunk.choices[0].delta.content
            yield response
| # === DEV MODE SYSTEM MESSAGE INPUT === | |
def set_system_override(message):
    """Store *message* as the dev-mode system override and echo a confirmation."""
    global system_override
    system_override = message
    return "System message set:\n" + message
| # === GRADIO UI === | |
# === GRADIO UI ===
with gr.Blocks() as demo:
    gr.Markdown("### SusAI ©2024 Intern Labs v1.1.0")
    chat = gr.ChatInterface(respond, type="messages")
    with gr.Row():
        system_input = gr.Textbox(
            label="System Message",
            placeholder="Send a system message (dev mode only)",
        )
        system_send = gr.Button("Send System Message")
    # Clicking echoes the stored override back into the textbox.
    system_send.click(set_system_override, inputs=system_input, outputs=system_input)

if __name__ == "__main__":
    demo.launch()