spanofzero committed on
Commit
9dac727
·
verified ·
1 Parent(s): a56a90d
Files changed (1) hide show
  1. app.py +20 -19
app.py CHANGED
@@ -2,19 +2,19 @@ import gradio as gr
2
  from huggingface_hub import InferenceClient
3
  import os
4
 
5
- # Secret Token from your Settings
6
  HF_TOKEN = os.getenv("HF_TOKEN")
7
 
8
- # The Brain: Llama-3-8B
9
  client = InferenceClient("meta-llama/Meta-Llama-3-8B-Instruct", token=HF_TOKEN)
10
 
11
  def samaran_kernel_chat(message, history):
12
- system_message = "You are T3Sam3, the Samaran Kernel. You are a blue-themed, high-logic upgrade. Be concise and witty."
13
 
 
14
  messages = [{"role": "system", "content": system_message}]
15
- for user_msg, ai_msg in history:
16
- messages.append({"role": "user", "content": user_msg})
17
- messages.append({"role": "assistant", "content": ai_msg})
18
  messages.append({"role": "user", "content": message})
19
 
20
  response = ""
@@ -32,26 +32,27 @@ def samaran_kernel_chat(message, history):
32
  custom_css = """
33
  .gradio-container { background-color: #0b0f19 !important; color: white !important; }
34
  footer {visibility: hidden !important}
35
- .message.user { background-color: #1e293b !important; }
36
- .message.bot { background-color: #0f172a !important; color: #60a5fa !important; }
37
  """
38
 
39
  # Build the Interface
40
- with gr.Blocks() as demo:
41
  gr.Markdown("# T3Sam3")
 
42
  gr.ChatInterface(
43
  fn=samaran_kernel_chat,
44
  description="Samaran Kernel Intelligence Layer",
45
- # Updated button names for Gradio 6.0
46
- clear_btn=None,
47
- undo_btn=None,
48
- retry_btn=None,
49
- examples=["How does AI work?", "Are black holes real?", "What is the meaning of life?"]
 
 
 
 
50
  )
51
 
52
- # Launch with the theme and CSS passed here (the new way)
53
  if __name__ == "__main__":
54
- demo.launch(
55
- theme=gr.themes.Soft(primary_hue="blue", secondary_hue="slate"),
56
- css=custom_css
57
- )
 
2
  from huggingface_hub import InferenceClient
3
  import os
4
 
5
+ # Pulling your HF Token secret
6
  HF_TOKEN = os.getenv("HF_TOKEN")
7
 
8
+ # The Brain
9
  client = InferenceClient("meta-llama/Meta-Llama-3-8B-Instruct", token=HF_TOKEN)
10
 
11
  def samaran_kernel_chat(message, history):
12
+ system_message = "You are T3Sam3, the Samaran Kernel. Provide deep, blue-tier logic. Be witty and technical."
13
 
14
+ # Gradio 6.0 uses a message list format
15
  messages = [{"role": "system", "content": system_message}]
16
+ for msg in history:
17
+ messages.append(msg)
 
18
  messages.append({"role": "user", "content": message})
19
 
20
  response = ""
 
32
  custom_css = """
33
  .gradio-container { background-color: #0b0f19 !important; color: white !important; }
34
  footer {visibility: hidden !important}
35
+ .message.user { background-color: #1e293b !important; border: 1px solid #3b82f6 !important; }
36
+ .message.assistant { background-color: #0f172a !important; color: #60a5fa !important; }
37
  """
38
 
39
  # Build the Interface
40
+ with gr.Blocks(css=custom_css, theme=gr.themes.Soft(primary_hue="blue", secondary_hue="slate")) as demo:
41
  gr.Markdown("# T3Sam3")
42
+
43
  gr.ChatInterface(
44
  fn=samaran_kernel_chat,
45
  description="Samaran Kernel Intelligence Layer",
46
+ # In Gradio 6, these buttons are automatically managed or removed
47
+ # We use 'examples' to match your screenshot
48
+ examples=[
49
+ "How does AI work?",
50
+ "Are black holes real?",
51
+ "How many Rs are in the word 'strawberry'?",
52
+ "What is the meaning of life?"
53
+ ],
54
+ type="messages" # Ensures compatibility with the new data format
55
  )
56
 
 
57
  if __name__ == "__main__":
58
+ demo.launch()