spanofzero committed on
Commit
a56a90d
·
verified ·
1 Parent(s): 7ca962e

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +20 -16
app.py CHANGED
@@ -2,14 +2,14 @@ import gradio as gr
2
  from huggingface_hub import InferenceClient
3
  import os
4
 
5
- # Pulling your HF Token secret
6
  HF_TOKEN = os.getenv("HF_TOKEN")
7
 
8
- # Connecting to the brain (Llama-3-8B)
9
  client = InferenceClient("meta-llama/Meta-Llama-3-8B-Instruct", token=HF_TOKEN)
10
 
11
  def samaran_kernel_chat(message, history):
12
- system_message = "You are the Samaran Kernel (T3Sam3). Provide deep, technical, yet witty insights. You are a blue-themed upgrade to the standard T3 Chat."
13
 
14
  messages = [{"role": "system", "content": system_message}]
15
  for user_msg, ai_msg in history:
@@ -28,26 +28,30 @@ def samaran_kernel_chat(message, history):
28
  response += token
29
  yield response
30
 
31
- # Custom CSS to match the 't3.chat' layout with blue hues
32
  custom_css = """
33
- .gradio-container { background-color: #0b0f19 !important; }
34
- footer {visibility: hidden}
35
- #component-0 { border: none !important; }
36
- .message.user { background-color: #1e293b !important; border-radius: 10px !important; }
37
- .message.bot { background-color: #0f172a !important; border-radius: 10px !important; color: #60a5fa !important; }
38
- button.primary { background: linear-gradient(90deg, #2563eb, #3b82f6) !important; border: none !important; }
39
  """
40
 
41
- # The T3Sam3 Interface
42
- with gr.Blocks(css=custom_css, theme=gr.themes.Soft(primary_hue="blue", secondary_hue="slate")) as demo:
43
  gr.Markdown("# T3Sam3")
44
  gr.ChatInterface(
45
  fn=samaran_kernel_chat,
46
- description="Samaran Kernel Intelligence Layer - Blue Tier Edition",
47
- clear_btn=None,
48
- undo_btn=None,
 
49
  retry_btn=None,
 
50
  )
51
 
 
52
  if __name__ == "__main__":
53
- demo.launch()
 
 
 
 
2
  from huggingface_hub import InferenceClient
3
  import os
4
 
5
+ # Secret Token from your Settings
6
  HF_TOKEN = os.getenv("HF_TOKEN")
7
 
8
+ # The Brain: Llama-3-8B
9
  client = InferenceClient("meta-llama/Meta-Llama-3-8B-Instruct", token=HF_TOKEN)
10
 
11
  def samaran_kernel_chat(message, history):
12
+ system_message = "You are T3Sam3, the Samaran Kernel. You are a blue-themed, high-logic upgrade. Be concise and witty."
13
 
14
  messages = [{"role": "system", "content": system_message}]
15
  for user_msg, ai_msg in history:
 
28
  response += token
29
  yield response
30
 
31
+ # Custom CSS for that dark, blue T3 look
32
  custom_css = """
33
+ .gradio-container { background-color: #0b0f19 !important; color: white !important; }
34
+ footer {visibility: hidden !important}
35
+ .message.user { background-color: #1e293b !important; }
36
+ .message.bot { background-color: #0f172a !important; color: #60a5fa !important; }
 
 
37
  """
38
 
39
+ # Build the Interface
40
+ with gr.Blocks() as demo:
41
  gr.Markdown("# T3Sam3")
42
  gr.ChatInterface(
43
  fn=samaran_kernel_chat,
44
+ description="Samaran Kernel Intelligence Layer",
45
+ # NOTE(review): clear_btn/undo_btn/retry_btn were removed from gr.ChatInterface
+ # in Gradio 5.x (they were not renamed in 6.0) — passing them raises TypeError
+ # on current versions; confirm the Gradio version pinned for this Space
46
+ clear_btn=None,
47
+ undo_btn=None,
48
  retry_btn=None,
49
+ examples=["How does AI work?", "Are black holes real?", "What is the meaning of life?"]
50
  )
51
 
52
+ # NOTE(review): gr.Blocks.launch() does not accept theme/css — those are
+ # gr.Blocks(...) constructor arguments; as written this call will raise
+ # TypeError. Move theme=/css= back into gr.Blocks(...) above.
53
  if __name__ == "__main__":
54
+ demo.launch(
55
+ theme=gr.themes.Soft(primary_hue="blue", secondary_hue="slate"),
56
+ css=custom_css
57
+ )