spanofzero committed on
Commit
8007595
·
verified ·
1 Parent(s): 9dac727

test3 stable

Browse files
Files changed (1) hide show
  1. app.py +13 -15
app.py CHANGED
@@ -5,19 +5,21 @@ import os
5
  # Pulling your HF Token secret
6
  HF_TOKEN = os.getenv("HF_TOKEN")
7
 
8
- # The Brain
9
  client = InferenceClient("meta-llama/Meta-Llama-3-8B-Instruct", token=HF_TOKEN)
10
 
11
  def samaran_kernel_chat(message, history):
12
  system_message = "You are T3Sam3, the Samaran Kernel. Provide deep, blue-tier logic. Be witty and technical."
13
 
14
- # Gradio 6.0 uses a message list format
15
  messages = [{"role": "system", "content": system_message}]
16
- for msg in history:
17
- messages.append(msg)
 
18
  messages.append({"role": "user", "content": message})
19
 
20
  response = ""
 
21
  for message_chunk in client.chat_completion(
22
  messages,
23
  max_tokens=1024,
@@ -30,28 +32,24 @@ def samaran_kernel_chat(message, history):
30
 
31
  # Custom CSS for that dark, blue T3 look
32
  custom_css = """
33
- .gradio-container { background-color: #0b0f19 !important; color: white !important; }
34
- footer {visibility: hidden !important}
35
  .message.user { background-color: #1e293b !important; border: 1px solid #3b82f6 !important; }
36
- .message.assistant { background-color: #0f172a !important; color: #60a5fa !important; }
37
  """
38
 
39
- # Build the Interface
40
- with gr.Blocks(css=custom_css, theme=gr.themes.Soft(primary_hue="blue", secondary_hue="slate")) as demo:
41
  gr.Markdown("# T3Sam3")
42
-
43
  gr.ChatInterface(
44
  fn=samaran_kernel_chat,
45
  description="Samaran Kernel Intelligence Layer",
46
- # In Gradio 6, these buttons are automatically managed or removed
47
- # We use 'examples' to match your screenshot
48
  examples=[
49
  "How does AI work?",
50
  "Are black holes real?",
51
- "How many Rs are in the word 'strawberry'?",
52
  "What is the meaning of life?"
53
- ],
54
- type="messages" # Ensures compatibility with the new data format
55
  )
56
 
57
  if __name__ == "__main__":
 
5
  # Pulling your HF Token secret
6
  HF_TOKEN = os.getenv("HF_TOKEN")
7
 
8
+ # Connecting to the brain
9
  client = InferenceClient("meta-llama/Meta-Llama-3-8B-Instruct", token=HF_TOKEN)
10
 
11
  def samaran_kernel_chat(message, history):
12
  system_message = "You are T3Sam3, the Samaran Kernel. Provide deep, blue-tier logic. Be witty and technical."
13
 
14
+ # Stable version uses (message, history) as strings
15
  messages = [{"role": "system", "content": system_message}]
16
+ for human, assistant in history:
17
+ messages.append({"role": "user", "content": human})
18
+ messages.append({"role": "assistant", "content": assistant})
19
  messages.append({"role": "user", "content": message})
20
 
21
  response = ""
22
+ # Standard Chat Completion
23
  for message_chunk in client.chat_completion(
24
  messages,
25
  max_tokens=1024,
 
32
 
33
  # Custom CSS for that dark, blue T3 look
34
  custom_css = """
35
+ body, .gradio-container { background-color: #0b0f19 !important; }
36
+ footer {display: none !important}
37
  .message.user { background-color: #1e293b !important; border: 1px solid #3b82f6 !important; }
38
+ .message.bot { background-color: #0f172a !important; color: #60a5fa !important; }
39
  """
40
 
41
+ # The Stable Interface Build
42
+ with gr.Blocks(css=custom_css, theme=gr.themes.Soft(primary_hue="blue")) as demo:
43
  gr.Markdown("# T3Sam3")
 
44
  gr.ChatInterface(
45
  fn=samaran_kernel_chat,
46
  description="Samaran Kernel Intelligence Layer",
 
 
47
  examples=[
48
  "How does AI work?",
49
  "Are black holes real?",
50
+ "How many Rs are in 'strawberry'?",
51
  "What is the meaning of life?"
52
+ ]
 
53
  )
54
 
55
  if __name__ == "__main__":