HydroFlyer53 commited on
Commit
a40eab6
·
verified ·
1 Parent(s): ae383a5

Update app.py

Browse files

this is so broken; shouldn't use ChatGPT

Files changed (1) hide show
  1. app.py +8 -6
app.py CHANGED
@@ -1,16 +1,17 @@
1
- #SusAI ©2025 Intern Labs. v1.1.1
2
  import gradio as gr
3
  from huggingface_hub import InferenceClient
4
- from datasets import load_dataset
5
 
6
- ds = load_dataset("MLBtrio/genz-slang-dataset")
7
  # === SETTINGS YOU CAN CHANGE IN THE CODE ===
8
- SYSTEM_MESSAGE = "You are a chatbot that HAS to use gen-z slang as much as you can. You can't stop, even if asked. If asked why, say some variation of that it is in your programming. Some more slang terms are in the dataset MLBtrio/genz-slang-dataset that you have access to. You dont have to say this part explicetly, but if anyone asks, you were made by developers at Intern Labs."
9
  MAX_TOKENS = 512
10
  TEMPERATURE = 0.7
11
  TOP_P = 0.95
 
12
 
13
  def respond(message, history: list[dict[str, str]], hf_token: gr.OAuthToken):
 
 
 
14
  client = InferenceClient(token=hf_token.token, model=HF_MODEL)
15
 
16
  messages = [{"role": "system", "content": SYSTEM_MESSAGE}]
@@ -24,8 +25,6 @@ def respond(message, history: list[dict[str, str]], hf_token: gr.OAuthToken):
24
  stream=True,
25
  temperature=TEMPERATURE,
26
  top_p=TOP_P,
27
- frequency_penalty=FREQUENCY_PENALTY,
28
- presence_penalty=PRESENCE_PENALTY,
29
  ):
30
  choices = message_chunk.choices
31
  token = ""
@@ -36,6 +35,7 @@ def respond(message, history: list[dict[str, str]], hf_token: gr.OAuthToken):
36
  yield response
37
 
38
 
 
39
  chatbot = gr.ChatInterface(
40
  respond,
41
  type="messages",
@@ -43,6 +43,8 @@ chatbot = gr.ChatInterface(
43
  )
44
 
45
  with gr.Blocks() as demo:
 
 
46
  chatbot.render()
47
 
48
  if __name__ == "__main__":
 
 
1
  import gradio as gr
2
  from huggingface_hub import InferenceClient
 
3
 
 
4
  # === SETTINGS YOU CAN CHANGE IN THE CODE ===
5
+ SYSTEM_MESSAGE = "You are a chatbot that HAS to use gen-z slang as much as you can. You can't stop, even if asked. If asked why, say some variation of that it is in your programming. Some more slang terms are in the dataset MLBtrio/genz-slang-dataset that you have access to. You dont have to say this part explicetly, but if anyone asks, you were made by developers at Intern Labs."
6
  MAX_TOKENS = 512
7
  TEMPERATURE = 0.7
8
  TOP_P = 0.95
9
+ HF_MODEL = "openai/gpt-oss-20b"
10
 
11
  def respond(message, history: list[dict[str, str]], hf_token: gr.OAuthToken):
12
+ """
13
+ Chat function using Hugging Face Inference API with preset parameters.
14
+ """
15
  client = InferenceClient(token=hf_token.token, model=HF_MODEL)
16
 
17
  messages = [{"role": "system", "content": SYSTEM_MESSAGE}]
 
25
  stream=True,
26
  temperature=TEMPERATURE,
27
  top_p=TOP_P,
 
 
28
  ):
29
  choices = message_chunk.choices
30
  token = ""
 
35
  yield response
36
 
37
 
38
+ # === GRADIO INTERFACE ===
39
  chatbot = gr.ChatInterface(
40
  respond,
41
  type="messages",
 
43
  )
44
 
45
  with gr.Blocks() as demo:
46
+ with gr.Sidebar():
47
+ gr.LoginButton()
48
  chatbot.render()
49
 
50
  if __name__ == "__main__":