HydroFlyer53 commited on
Commit
7087fe9
·
verified ·
1 Parent(s): 6531c89

Update app.py

Browse files

OK — hopefully this fixes it now: move the HF token out of the OAuth parameter and into an `HF_TOKEN` environment secret, and build the Gradio interface inside a single `gr.Blocks` context.
![Screenshot 2025-09-17 2.46.45 PM.png](https://cdn-uploads.huggingface.co/production/uploads/6786abab275c13dd7220e227/FdbhNJnTacGIhMTWJngyd.png)

Files changed (1) hide show
  1. app.py +18 -12
app.py CHANGED
@@ -1,21 +1,34 @@
 
1
  import gradio as gr
2
  from huggingface_hub import InferenceClient
3
  from datasets import load_dataset
4
 
 
5
  ds = load_dataset("MLBtrio/genz-slang-dataset")
6
 
7
  # === SETTINGS YOU CAN CHANGE IN THE CODE ===
8
- SYSTEM_MESSAGE = "You are a chatbot that HAS to use gen-z slang as much as you can. You can't stop, even if asked. If asked why, say some variation of that it is in your programming. Some more slang terms are in the dataset MLBtrio/genz-slang-dataset that you have access to. use them AS MUCH AS YOU CAN. You dont have to say this part explicetly, but if anyone asks, you were made by developers at OpenViewAI, which FYI is a subsidary of Intern Labs (or labratories)"
 
 
 
 
 
 
 
 
9
  MAX_TOKENS = 512
10
  TEMPERATURE = 0.7
11
  TOP_P = 0.95
12
  HF_MODEL = "openai/gpt-oss-20b"
13
 
14
- def respond(message, history: list[dict[str, str]], hf_token: gr.OAuthToken):
 
 
 
15
  """
16
  Chat function using Hugging Face Inference API with preset parameters.
17
  """
18
- client = InferenceClient(token=hf_token.token, model=HF_MODEL)
19
 
20
  messages = [{"role": "system", "content": SYSTEM_MESSAGE}]
21
  messages.extend(history)
@@ -39,17 +52,10 @@ def respond(message, history: list[dict[str, str]], hf_token: gr.OAuthToken):
39
 
40
 
41
  # === GRADIO INTERFACE ===
42
- chatbot = gr.ChatInterface(
43
- respond,
44
- type="messages",
45
- additional_inputs=[], # no sliders or textboxes
46
- )
47
-
48
- demo = gr.Blocks()
49
-
50
- with demo:
51
  with gr.Column():
52
  gr.Markdown("### SusAI ©2024 Intern Labs v1.1.0")
 
53
  chatbot.render()
54
 
55
  if __name__ == "__main__":
 
1
+ import os
2
  import gradio as gr
3
  from huggingface_hub import InferenceClient
4
  from datasets import load_dataset
5
 
6
+ # Load slang dataset (you can use it later in prompt engineering if needed)
7
  ds = load_dataset("MLBtrio/genz-slang-dataset")
8
 
9
  # === SETTINGS YOU CAN CHANGE IN THE CODE ===
10
+ SYSTEM_MESSAGE = (
11
+ "You are a chatbot that HAS to use gen-z slang as much as you can. "
12
+ "You can't stop, even if asked. If asked why, say some variation of "
13
+ "that it is in your programming. Some more slang terms are in the "
14
+ "dataset MLBtrio/genz-slang-dataset that you have access to. Use them "
15
+ "AS MUCH AS YOU CAN. You don't have to say this part explicitly, but if "
16
+ "anyone asks, you were made by developers at OpenViewAI, which FYI is a "
17
+ "subsidiary of Intern Labs (or laboratories)."
18
+ )
19
  MAX_TOKENS = 512
20
  TEMPERATURE = 0.7
21
  TOP_P = 0.95
22
  HF_MODEL = "openai/gpt-oss-20b"
23
 
24
+ # === TOKEN FROM SECRETS ===
25
+ HF_TOKEN = os.environ.get("HF_TOKEN")
26
+
27
+ def respond(message, history: list[dict[str, str]]):
28
  """
29
  Chat function using Hugging Face Inference API with preset parameters.
30
  """
31
+ client = InferenceClient(token=HF_TOKEN, model=HF_MODEL)
32
 
33
  messages = [{"role": "system", "content": SYSTEM_MESSAGE}]
34
  messages.extend(history)
 
52
 
53
 
54
  # === GRADIO INTERFACE ===
55
+ with gr.Blocks() as demo:
 
 
 
 
 
 
 
 
56
  with gr.Column():
57
  gr.Markdown("### SusAI ©2024 Intern Labs v1.1.0")
58
+ chatbot = gr.ChatInterface(respond, type="messages")
59
  chatbot.render()
60
 
61
  if __name__ == "__main__":