spanofzero committed on
Commit
3bb37cb
·
verified ·
1 Parent(s): 2a6aa8e
Files changed (1) hide show
  1. app.py +26 -20
app.py CHANGED
@@ -1,16 +1,18 @@
1
  import gradio as gr
2
- from huggingface_hub import InferenceClient
3
  import os
4
 
5
- # 1. Grab the secret password you just saved
6
- HF_TOKEN = os.getenv("HF_TOKEN")
7
 
8
- # 2. Connect to the AI brain
9
- client = InferenceClient("meta-llama/Meta-Llama-3-8B-Instruct", token=HF_TOKEN)
 
 
 
10
 
11
- # 3. The Samaran Kernel Logic
12
  def samaran_kernel_chat(message, history):
13
- system_message = "You are T3Sam3, the Samaran Kernel. Provide deep, blue-tier logic. Be witty and technical."
14
 
15
  messages = [{"role": "system", "content": system_message}]
16
  for human, assistant in history:
@@ -19,17 +21,21 @@ def samaran_kernel_chat(message, history):
19
  messages.append({"role": "user", "content": message})
20
 
21
  response = ""
22
- for message_chunk in client.chat_completion(
23
- messages,
24
- max_tokens=1024,
25
- stream=True,
26
- ):
27
- token = message_chunk.choices[0].delta.content
28
- if token:
29
- response += token
30
- yield response
 
 
 
 
31
 
32
- # 4. The Blue T3Sam3 Look
33
  custom_css = """
34
  body, .gradio-container { background-color: #0b0f19 !important; }
35
  footer {display: none !important}
@@ -37,9 +43,9 @@ footer {display: none !important}
37
  .message.bot { background-color: #0f172a !important; color: #60a5fa !important; }
38
  """
39
 
40
- # 5. Build the Interface
41
  with gr.Blocks(css=custom_css, theme=gr.themes.Soft(primary_hue="blue")) as demo:
42
- gr.Markdown("# T3Sam3")
43
  gr.ChatInterface(
44
  fn=samaran_kernel_chat,
45
  description="Samaran Kernel Intelligence Layer",
@@ -49,7 +55,7 @@ with gr.Blocks(css=custom_css, theme=gr.themes.Soft(primary_hue="blue")) as demo
49
  "How many Rs are in 'strawberry'?",
50
  "What is the meaning of life?"
51
  ],
52
- cache_examples=False # <--- This stops the startup crash
53
  )
54
 
55
  if __name__ == "__main__":
 
import gradio as gr
from openai import OpenAI
import os

# 1. Read the OpenRouter API key from the environment.
KIMI_KEY = os.getenv("KIMI_API_KEY")

# 2. Build an OpenAI-compatible client pointed at OpenRouter,
#    which proxies requests on to the Kimi engine.
client = OpenAI(
    api_key=KIMI_KEY,
    base_url="https://openrouter.ai/api/v1",  # This routes us through OpenRouter
)
 
14
# --- samaran_kernel_chat: generator that streams the assistant's reply ---
# Builds an OpenAI-style `messages` list (system prompt + chat history + the
# new user message), then streams a chat completion from the module-level
# `client`, yielding the growing response string for Gradio to render.
  def samaran_kernel_chat(message, history):
15
+ system_message = "You are T3Sam3, the Samaran Kernel running on the Kimi engine. Provide deep, blue-tier logic. Be witty, highly accurate, and technical."
16

17
  messages = [{"role": "system", "content": system_message}]
18
  for human, assistant in history:
# NOTE(review): the diff elides this loop's body (context lines 19-20);
# presumably it appends the human/assistant pair to `messages` — confirm
# against the full file before changing anything here.
21
  messages.append({"role": "user", "content": message})
22

23
  response = ""
24
+ try:
25
+ # Requesting Kimi through OpenRouter
26
+ stream = client.chat.completions.create(
27
+ model="moonshotai/moonshot-v1-8k", # OpenRouter's specific tag for Kimi
# NOTE(review): verify this slug actually exists on OpenRouter's model list —
# an unknown model ID makes every request fail and surface only the generic
# "Kernel Error" message below.
28
+ messages=messages,
29
+ stream=True,
30
+ )
31
+ for chunk in stream:
32
+ if chunk.choices[0].delta.content is not None:
33
+ response += chunk.choices[0].delta.content
34
+ yield response
35
+ except Exception as e:
# Broad catch is deliberate: any API failure is shown to the user as a chat
# message instead of crashing the Gradio app.
36
+ yield f"Kernel Error: Could not connect to Kimi via OpenRouter. Please check your KIMI_API_KEY and ensure you have credits loaded on OpenRouter. Details: {str(e)}"
37

38
+ # The Blue T3Sam3 Look
# Dark navy background with blue bot bubbles; `footer {display:none}` hides
# the default Gradio footer.
39
  custom_css = """
40
  body, .gradio-container { background-color: #0b0f19 !important; }
41
  footer {display: none !important}
# NOTE(review): the diff elides CSS line 42 here (likely a `.message.user`
# rule, given the `.message.bot` rule below) — confirm against the full file.

43
  .message.bot { background-color: #0f172a !important; color: #60a5fa !important; }
44
  """
45

46
+ # Build the Interface
47
  with gr.Blocks(css=custom_css, theme=gr.themes.Soft(primary_hue="blue")) as demo:
48
+ gr.Markdown("# T3Sam3 (Powered by Kimi via OpenRouter)")
49
  gr.ChatInterface(
50
  fn=samaran_kernel_chat,
51
  description="Samaran Kernel Intelligence Layer",
# NOTE(review): lines 52-54 are elided by the diff (presumably the
# `examples=[` opener and the first example prompt) — confirm against the
# full file.

55
  "How many Rs are in 'strawberry'?",
56
  "What is the meaning of life?"
57
  ],
58
+ cache_examples=False
# Keeps Gradio from pre-running the model on the examples at startup; the
# previous revision's comment says this prevented a startup crash.
59
  )
60

61
  if __name__ == "__main__":
# NOTE(review): the guard's body (presumably `demo.launch()`) lies below the
# visible region of this diff.