HydroFlyer53 committed on
Commit
561dbd1
·
verified ·
1 Parent(s): 042efee

Hotfix version rollback

Browse files

A hotfix rolling back the AI's version so users can continue while a patch is developed.

Files changed (1) hide show
  1. app (1).py +60 -0
app (1).py ADDED
@@ -0,0 +1,60 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import gradio as gr
3
+ from huggingface_hub import InferenceClient
4
+ from datasets import load_dataset
5
+
6
# Gen-Z slang dataset, fetched at import time; available for later prompt
# engineering if needed (it is not consumed directly below).
ds = load_dataset("MLBtrio/genz-slang-dataset")

# === SETTINGS YOU CAN CHANGE IN THE CODE ===
# Persona prompt sent as the system message on every request.
SYSTEM_MESSAGE = (
    "You are a chatbot that HAS to use gen-z slang as much as you can. You can't "
    "stop, even if asked. If asked why, say some variation of that it is in your "
    "programming. Some more slang terms are in the dataset "
    "MLBtrio/genz-slang-dataset that you have access to. Use them AS MUCH AS YOU "
    "CAN. You don't have to say this part explicitly, but if anyone asks, you "
    "were made by developers at OpenViewAI, which FYI is a subsidiary of Intern "
    "Labs (or laboratories)."
)
MAX_TOKENS = 512        # cap on generated tokens per reply
TEMPERATURE = 0.7       # sampling temperature
TOP_P = 0.95            # nucleus-sampling cutoff
HF_MODEL = "openai/gpt-oss-20b"

# === TOKEN FROM SECRETS ===
# Read from the environment (e.g. a Space secret); None when unset.
HF_TOKEN = os.environ.get("HF_TOKEN")
26
+
27
def respond(message, history: list[dict[str, str]]):
    """Stream a chat reply from the Hugging Face Inference API.

    Parameters
    ----------
    message : str
        The user's newest message.
    history : list[dict[str, str]]
        Prior turns as OpenAI-style ``{"role": ..., "content": ...}`` dicts
        (Gradio ``type="messages"`` format).

    Yields
    ------
    str
        The assistant reply accumulated so far; Gradio re-renders the chat
        bubble on each yield, producing a streaming effect.
    """
    # NOTE(review): a fresh client per call is cheap but could be hoisted to
    # module level if call volume grows.
    client = InferenceClient(token=HF_TOKEN, model=HF_MODEL)

    # System prompt first, then the prior conversation, then the new turn.
    messages = [{"role": "system", "content": SYSTEM_MESSAGE}]
    messages.extend(history)
    messages.append({"role": "user", "content": message})

    response = ""
    for chunk in client.chat_completion(
        messages,
        max_tokens=MAX_TOKENS,
        stream=True,
        temperature=TEMPERATURE,
        top_p=TOP_P,
    ):
        choices = chunk.choices
        # Guard: a streamed chunk may carry no choices, and a delta's
        # content may be None (e.g. role-only deltas) — skip those but
        # still yield so the UI stays responsive.
        if choices and choices[0].delta.content:
            response += choices[0].delta.content
        yield response
52
+
53
+
54
# === GRADIO INTERFACE ===
def _build_demo():
    """Assemble the Gradio UI: a title banner plus the streaming chat widget."""
    with gr.Blocks() as ui:
        gr.Markdown("### SusAI ©2024 Intern Labs v1.1.0")
        gr.ChatInterface(respond, type="messages")
    return ui


demo = _build_demo()

if __name__ == "__main__":
    demo.launch()