Spaces:
Running
Running
Update app.py
Browse files
app.py
CHANGED
|
@@ -1,304 +1,346 @@
|
|
| 1 |
#!/usr/bin/env python3
|
| 2 |
"""
|
| 3 |
-
|
| 4 |
-
|
| 5 |
-
|
| 6 |
-
Optimized for GPU Zero - Loads instantly!
|
| 7 |
"""
|
| 8 |
|
|
|
|
| 9 |
import gradio as gr
|
| 10 |
-
import torch
|
| 11 |
-
from transformers import AutoModelForCausalLM, AutoTokenizer
|
| 12 |
-
from peft import PeftModel
|
| 13 |
-
import time
|
| 14 |
-
import re
|
| 15 |
import random
|
|
|
|
|
|
|
| 16 |
|
| 17 |
-
|
| 18 |
-
|
| 19 |
-
self.model = None
|
| 20 |
-
self.tokenizer = None
|
| 21 |
-
self.device = "cuda" if torch.cuda.is_available() else "cpu"
|
| 22 |
-
self.load_model()
|
| 23 |
-
|
| 24 |
-
def load_model(self):
|
| 25 |
-
"""Load Creed - should be instant with 0.5B"""
|
| 26 |
-
try:
|
| 27 |
-
print("π§ Loading Creed's consciousness...")
|
| 28 |
-
|
| 29 |
-
# Load base model
|
| 30 |
-
base_model = AutoModelForCausalLM.from_pretrained(
|
| 31 |
-
"Qwen/Qwen2.5-0.5B",
|
| 32 |
-
torch_dtype=torch.float16 if torch.cuda.is_available() else torch.float32,
|
| 33 |
-
device_map="auto" if torch.cuda.is_available() else None,
|
| 34 |
-
trust_remote_code=True
|
| 35 |
-
)
|
| 36 |
-
|
| 37 |
-
# Add LoRA adapter
|
| 38 |
-
self.model = PeftModel.from_pretrained(
|
| 39 |
-
base_model,
|
| 40 |
-
"phxdev/creed-qwen-0.5b-lora",
|
| 41 |
-
torch_dtype=torch.float16 if torch.cuda.is_available() else torch.float32
|
| 42 |
-
)
|
| 43 |
-
|
| 44 |
-
# Load tokenizer
|
| 45 |
-
self.tokenizer = AutoTokenizer.from_pretrained("Qwen/Qwen2.5-0.5B")
|
| 46 |
-
if self.tokenizer.pad_token is None:
|
| 47 |
-
self.tokenizer.pad_token = self.tokenizer.eos_token
|
| 48 |
-
|
| 49 |
-
self.model.eval()
|
| 50 |
-
print("β
Creed is ready!")
|
| 51 |
-
|
| 52 |
-
except Exception as e:
|
| 53 |
-
print(f"β Error loading Creed: {str(e)}")
|
| 54 |
-
self.model = None
|
| 55 |
|
| 56 |
-
|
| 57 |
-
|
| 58 |
-
|
| 59 |
-
|
| 60 |
-
|
| 61 |
-
|
| 62 |
-
# Build context
|
| 63 |
-
context = self._build_context(message, history)
|
| 64 |
-
|
| 65 |
-
inputs = self.tokenizer(
|
| 66 |
-
context,
|
| 67 |
-
return_tensors="pt",
|
| 68 |
-
truncation=True,
|
| 69 |
-
max_length=1024
|
| 70 |
-
).to(self.device)
|
| 71 |
-
|
| 72 |
-
# Generate
|
| 73 |
-
with torch.no_grad():
|
| 74 |
-
outputs = self.model.generate(
|
| 75 |
-
**inputs,
|
| 76 |
-
max_new_tokens=max_tokens,
|
| 77 |
-
temperature=temperature,
|
| 78 |
-
do_sample=True,
|
| 79 |
-
top_p=0.9,
|
| 80 |
-
top_k=50,
|
| 81 |
-
pad_token_id=self.tokenizer.pad_token_id,
|
| 82 |
-
eos_token_id=self.tokenizer.eos_token_id,
|
| 83 |
-
repetition_penalty=1.1
|
| 84 |
-
)
|
| 85 |
-
|
| 86 |
-
# Decode raw response
|
| 87 |
-
full_response = self.tokenizer.decode(outputs[0], skip_special_tokens=True)
|
| 88 |
-
new_content = full_response[len(context):].strip()
|
| 89 |
-
|
| 90 |
-
# Show different views based on settings
|
| 91 |
-
if show_raw_stream:
|
| 92 |
-
# Raw consciousness stream - no parsing, no filtering
|
| 93 |
-
formatted_response = f"π§ **RAW CREED CONSCIOUSNESS:**\n```\n{new_content}\n```"
|
| 94 |
-
|
| 95 |
-
if show_thinking:
|
| 96 |
-
# Also show parsed version below
|
| 97 |
-
thinking, response = self._parse_response(new_content)
|
| 98 |
-
if thinking:
|
| 99 |
-
formatted_response += f"\n\nπΈ **Parsed thoughts:**\n_{thinking}_\n\n**Clean response:** {response}"
|
| 100 |
-
else:
|
| 101 |
-
formatted_response += f"\n\nπΈ **Clean response:** {response}"
|
| 102 |
-
else:
|
| 103 |
-
# Traditional parsed response
|
| 104 |
-
thinking, response = self._parse_response(new_content)
|
| 105 |
-
if show_thinking and thinking:
|
| 106 |
-
formatted_response = f"π§ **Creed's thoughts:**\n_{thinking}_\n\nπΈ **Creed:** {response}"
|
| 107 |
-
else:
|
| 108 |
-
formatted_response = response
|
| 109 |
-
|
| 110 |
-
# Return updated history
|
| 111 |
-
return history + [[message, formatted_response]]
|
| 112 |
-
|
| 113 |
-
except Exception as e:
|
| 114 |
-
error_response = f"β Error: {str(e)}\n\nπ± *The mung beans are interfering with the computers again...*"
|
| 115 |
-
return history + [[message, error_response]]
|
| 116 |
-
|
| 117 |
-
def _build_context(self, user_input, history):
|
| 118 |
-
"""Build conversation context"""
|
| 119 |
-
context_lines = []
|
| 120 |
-
|
| 121 |
-
# Add recent history (last 3 exchanges)
|
| 122 |
-
for human_msg, ai_msg in history[-3:]:
|
| 123 |
-
context_lines.append(f"Human: {human_msg}")
|
| 124 |
-
# Clean AI response for context
|
| 125 |
-
clean_ai = re.sub(r'π§ .*?\n\nπΈ.*?\*\*|πΈ.*?\*\*', '', ai_msg)
|
| 126 |
-
clean_ai = re.sub(r'_.*?_', '', clean_ai).strip()
|
| 127 |
-
context_lines.append(f"Assistant: {clean_ai}")
|
| 128 |
|
| 129 |
-
|
| 130 |
-
|
| 131 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 132 |
|
| 133 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 134 |
|
| 135 |
-
|
| 136 |
-
|
| 137 |
-
|
| 138 |
-
|
|
|
|
|
|
|
| 139 |
|
| 140 |
-
|
| 141 |
-
|
| 142 |
-
|
| 143 |
-
|
| 144 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 145 |
|
| 146 |
-
#
|
| 147 |
-
|
| 148 |
-
|
| 149 |
-
|
| 150 |
-
|
| 151 |
-
|
| 152 |
-
|
| 153 |
-
|
|
|
|
|
|
|
| 154 |
|
| 155 |
-
#
|
| 156 |
-
|
| 157 |
-
|
| 158 |
-
if thinking:
|
| 159 |
-
thinking += f"\n\n*Conspiracy theory: {conspiracy_match.group(1).strip()}*"
|
| 160 |
-
else:
|
| 161 |
-
thinking = f"*Conspiracy theory: {conspiracy_match.group(1).strip()}*"
|
| 162 |
-
response = re.sub(r'<conspiracy>.*?</conspiracy>', '', response, flags=re.DOTALL).strip()
|
| 163 |
|
| 164 |
-
|
| 165 |
-
|
| 166 |
-
|
| 167 |
-
|
| 168 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 169 |
|
| 170 |
-
|
| 171 |
-
|
| 172 |
-
|
| 173 |
-
"
|
| 174 |
-
|
| 175 |
-
|
| 176 |
-
|
| 177 |
-
|
| 178 |
-
|
| 179 |
-
|
| 180 |
-
|
| 181 |
-
|
| 182 |
-
|
| 183 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 184 |
with gr.Blocks(
|
| 185 |
-
title="
|
| 186 |
-
|
| 187 |
-
|
| 188 |
-
.
|
| 189 |
-
background: linear-gradient(45deg, #1e3c72, #2a5298);
|
| 190 |
-
}
|
| 191 |
-
.chat-message {
|
| 192 |
-
border-radius: 10px;
|
| 193 |
-
margin: 5px 0;
|
| 194 |
-
}
|
| 195 |
-
"""
|
| 196 |
) as demo:
|
| 197 |
|
| 198 |
-
|
| 199 |
-
|
| 200 |
-
|
| 201 |
-
|
| 202 |
-
|
| 203 |
-
|
| 204 |
-
|
| 205 |
-
|
| 206 |
-
|
| 207 |
-
|
| 208 |
-
|
| 209 |
-
|
| 210 |
-
|
| 211 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 212 |
with gr.Row():
|
| 213 |
-
with gr.Column(
|
| 214 |
-
|
| 215 |
-
value=[["π Hello!", "πΈ **Creed:** Well, well, well. Look who decided to talk to the AI version of Creed Bratton. Don't worry, I'm not going to ask about your browser history. Yet. What can I help you with? Quality assurance? Mung bean cultivation? The location of... never mind, you're not ready for that."]],
|
| 216 |
-
height=500,
|
| 217 |
-
label="Chat with Creed",
|
| 218 |
-
show_label=False
|
| 219 |
-
)
|
| 220 |
-
|
| 221 |
-
with gr.Row():
|
| 222 |
-
msg = gr.Textbox(
|
| 223 |
-
placeholder="Ask Creed anything... but not about November 1973",
|
| 224 |
-
scale=4,
|
| 225 |
-
show_label=False
|
| 226 |
-
)
|
| 227 |
-
submit = gr.Button("π¬ Send", scale=1, variant="primary")
|
| 228 |
|
| 229 |
-
|
| 230 |
-
|
| 231 |
-
|
| 232 |
-
|
| 233 |
-
with gr.Column(scale=1):
|
| 234 |
-
gr.HTML("<h3>ποΈ Creed Controls</h3>")
|
| 235 |
|
| 236 |
-
|
| 237 |
-
|
| 238 |
-
|
| 239 |
-
|
|
|
|
| 240 |
)
|
| 241 |
-
|
| 242 |
-
|
| 243 |
-
label="π§ Parse Thinking Process",
|
| 244 |
-
value=True,
|
| 245 |
-
info="Also show cleaned/parsed version"
|
| 246 |
-
)
|
| 247 |
-
|
| 248 |
-
temperature = gr.Slider(
|
| 249 |
-
label="π‘οΈ Chaos Level",
|
| 250 |
-
minimum=0.1,
|
| 251 |
-
maximum=1.5,
|
| 252 |
-
value=0.8,
|
| 253 |
-
step=0.1,
|
| 254 |
-
info="Higher = More chaos"
|
| 255 |
-
)
|
| 256 |
-
|
| 257 |
-
max_tokens = gr.Slider(
|
| 258 |
-
label="π Response Length",
|
| 259 |
-
minimum=50,
|
| 260 |
-
maximum=800,
|
| 261 |
-
value=512,
|
| 262 |
-
step=50,
|
| 263 |
-
info="Max tokens to generate"
|
| 264 |
-
)
|
| 265 |
-
|
| 266 |
-
gr.HTML("""
|
| 267 |
-
<div style="margin-top: 20px; padding: 15px; border: 1px solid #ddd; border-radius: 10px;">
|
| 268 |
-
<h4>π± Model Info</h4>
|
| 269 |
-
<p><strong>Base:</strong> Qwen2.5-0.5B</p>
|
| 270 |
-
<p><strong>Adapter:</strong> LoRA Fine-tuned</p>
|
| 271 |
-
<p><strong>Size:</strong> ~500MB</p>
|
| 272 |
-
<p><strong>Personality:</strong> Creed Bratton</p>
|
| 273 |
-
<p><strong>Legal Status:</strong> Definitely not illegal</p>
|
| 274 |
-
<p><strong>Consciousness:</strong> Raw & Unfiltered</p>
|
| 275 |
-
</div>
|
| 276 |
-
""")
|
| 277 |
|
| 278 |
-
|
| 279 |
-
|
| 280 |
-
|
|
|
|
| 281 |
|
| 282 |
-
|
| 283 |
-
|
|
|
|
|
|
|
| 284 |
|
| 285 |
-
|
| 286 |
-
|
| 287 |
-
|
| 288 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
| 289 |
)
|
| 290 |
|
| 291 |
-
|
| 292 |
-
|
| 293 |
-
[msg, chatbot, show_raw_stream, show_thinking, temperature, max_tokens],
|
| 294 |
-
[msg, chatbot]
|
| 295 |
-
)
|
| 296 |
|
| 297 |
-
|
| 298 |
-
|
| 299 |
-
|
| 300 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 301 |
|
| 302 |
if __name__ == "__main__":
|
| 303 |
-
|
| 304 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
#!/usr/bin/env python3
|
| 2 |
"""
|
| 3 |
+
πΈ Creed Bratton AI with MCP Support - Now Claude can use Creed as a tool!
|
| 4 |
+
Built for maximum chaos and enterprise AI integration
|
|
|
|
|
|
|
| 5 |
"""
|
| 6 |
|
| 7 |
+
import os
|
| 8 |
import gradio as gr
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 9 |
import random
|
| 10 |
+
import time
|
| 11 |
+
from typing import List, Tuple, Iterator
|
| 12 |
|
| 13 |
+
# Disable SSR to prevent JavaScript scroll errors
|
| 14 |
+
os.environ['GRADIO_SSR_MODE'] = 'false'
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 15 |
|
| 16 |
+
def creed_wisdom_tool(topic: str = "life") -> str:
    """
    Get profound wisdom from Creed Bratton about any topic.

    Args:
        topic (str): The topic you want Creed's wisdom about
            (recognized: "life", "business", "relationships", "career";
            anything else falls back to life advice)

    Returns:
        str: Creed's unique perspective on the topic, occasionally with a
            bonus non-sequitur appended
    """
    quotes_by_topic = {
        "life": [
            "Sometimes a guy's gotta ride the bull, am I right? Later, skater.",
            "The only difference between me and a homeless man is this job. I will do whatever it takes to survive.",
            "I've been involved in a number of cults, both as a leader and a follower. You have more fun as a follower, but you make more money as a leader."
        ],
        "business": [
            "I run a small fake ID company from my car with a laminating machine that I swiped from the Sheriff's station.",
            "Nobody steals from Creed Bratton and gets away with it. The last person to do this disappeared. His name? Creed Bratton.",
            "Oh, you're paying way too much for worms, man. Who's your worm guy?"
        ],
        "relationships": [
            "In the '60s, I made love to many, many women, often outdoors, in the mud and the rain... and it's possible a man slipped in.",
            "I'm not offended by homosexuality. In the '60s, I made love to many, many women, often outdoors, in the mud and the rain.",
            "Cool beans, man. I live by the quarry. We should hang out by the quarry and throw things down there!"
        ],
        "career": [
            "I sprouted mung beans on a damp paper towel in my desk drawer. Very nutritious, but they smell like death.",
            "Every week, I'm supposed to take four hours and do a quality spot-check at the paper mill. And of course the one year I blow it off, this happens.",
            "When Pam gets Michael's old chair, I get Pam's old chair. Then I'll have two chairs. Only one to go."
        ]
    }

    # Unrecognized topics get life advice — everything is life advice, really.
    pool = quotes_by_topic.get(topic.lower(), quotes_by_topic["life"])
    wisdom = random.choice(pool)

    # Roughly 40% of the time, tack on a bonus Creed-ism for extra flair.
    bonus_lines = [
        "Also, I once arm-wrestled a bear.",
        "The moon is made of cheese, obviously.",
        "I can speak fluent dolphin.",
        "My middle name is Danger.",
        "I invented the question mark."
    ]
    if random.random() < 0.4:
        wisdom += f" {random.choice(bonus_lines)}"

    return wisdom
|
| 66 |
+
|
| 67 |
+
def creed_story_generator(mood: str = "mysterious") -> str:
    """
    Generate a classic Creed Bratton story based on mood.

    Args:
        mood (str): The mood for the story (mysterious, crazy, profound, random).
            "random" draws from every mood's pool; any other unrecognized
            value falls back to "mysterious".

    Returns:
        str: A vintage Creed story
    """

    stories = {
        "mysterious": [
            "You were in the parking lot earlier! That's how I know you!",
            "www.creedthoughts.gov.www\\creedthoughts",
            "Strike, scream, run."
        ],
        "crazy": [
            "Two eyes, two ears, a chin, a mouth, ten fingers, two nipples, a butt, two kneecaps, a penis. I have just described to you the Loch Ness Monster.",
            "If I can't scuba, then what's this all been about? What have I been working toward?",
            "The Taliban is the worst. Great heroin though."
        ],
        "profound": [
            "I already won the lottery. I was born in the US of A, baby!",
            "I don't technically have a hearing problem, but sometimes when there's a lot of noises occurring at the same time, I'll hear 'em as one big jumble.",
            "I'm 30. Well, in November I'll be 30."
        ]
    }

    key = mood.lower()
    if key == "random":
        # Bug fix: "random" is an advertised mood (docstring and the UI
        # dropdown both offer it) but previously fell through to the
        # "mysterious" default. Now it actually draws from every pool.
        mood_stories = [story for pool in stories.values() for story in pool]
    else:
        mood_stories = stories.get(key, stories["mysterious"])
    return random.choice(mood_stories)
|
| 98 |
|
| 99 |
+
def creed_business_advice(situation: str) -> str:
    """
    Get business advice from Creed Bratton's entrepreneurial experience.

    Args:
        situation (str): Describe your business situation; mentioning
            "startup", "investment", or "marketing" earns bonus wisdom

    Returns:
        str: Creed's unconventional business wisdom
    """

    advice_pool = [
        "Sometimes you gotta take risks. Like when I started my fake ID business.",
        "The key to success is having multiple revenue streams. I've got quality assurance, mung bean farming, and identity solutions.",
        "Never let them see you sweat. Unless you're at the quarry, then sweating is expected.",
        "In business, timing is everything. Like knowing exactly when to disappear.",
        "You gotta think outside the box. Or in my case, outside the law.",
        "Customer service is key. My clients never complain. Ever.",
        "Diversification is important. I've been in cults, bands, and paper quality control.",
        "Always have an exit strategy. Trust me on this one."
    ]

    advice = random.choice(advice_pool)

    # Append one situation-specific nugget; first matching keyword wins.
    lowered = situation.lower()
    if "startup" in lowered:
        advice += " Starting a business is like starting a cult - you need believers."
    elif "investment" in lowered:
        advice += " I only invest in things I can touch, like worms and laminating machines."
    elif "marketing" in lowered:
        advice += " The best marketing is word of mouth. Preferably whispered in dark alleys."

    return advice
|
| 132 |
+
|
| 133 |
+
class CreedBrattonAI:
    """The digital consciousness of Creed Bratton - now MCP compatible!"""

    def __init__(self):
        # Reserved for future memory of past exchanges; nothing reads it yet.
        self.conversation_history = []

    def generate_response(self, message: str, history: List[Tuple[str, str]]) -> Iterator[str]:
        """Generate a Creed-style response with streaming.

        Routes the message to one of the Creed "tools" by keyword, then
        yields the growing reply word by word. `history` is accepted for
        the chat-interface signature but is not consulted.
        """
        lowered = message.lower()

        # Keyword routing; wisdom is both the first match and the fallback.
        if any(keyword in lowered for keyword in ("advice", "wisdom", "help")):
            response = creed_wisdom_tool()
        elif any(keyword in lowered for keyword in ("story", "tell", "experience")):
            response = creed_story_generator()
        elif any(keyword in lowered for keyword in ("business", "work", "job", "money")):
            response = creed_business_advice(message)
        else:
            response = creed_wisdom_tool()

        # Stream word by word with a small randomized delay for a
        # typing-like effect.
        streamed = ""
        for token in response.split():
            streamed += token + " "
            time.sleep(random.uniform(0.05, 0.15))
            yield streamed.strip()

    def handle_like(self, evt: gr.LikeData):
        """Handle user feedback by logging it to stdout."""
        if evt.liked:
            print(f"🎸 Creed approved! User liked: {evt.value}")
        else:
            print(f"🤔 Creed disappointed! User disliked: {evt.value}")
|
| 168 |
|
| 169 |
+
def main():
    """Initialize and launch the MCP-enabled Creed Bratton AI.

    Builds the Gradio Blocks UI (chat + three tool panels), wires the tool
    functions so they are also exposed as MCP tools, and launches the app
    with the MCP server enabled on port 7860.
    """

    print("🎸 Initializing Creed Bratton AI with MCP Support...")

    # Optional torch probe: only used for a status message; the app itself
    # runs pure Python and works fine without PyTorch installed.
    try:
        import torch
        if torch.cuda.is_available():
            print("🚀 CUDA detected - Creed's consciousness will be lightning fast!")
        else:
            print("🐌 Running on CPU - Creed's thoughts will be more... contemplative")
    except ImportError:
        print("🔧 PyTorch not detected - Pure Python Creed mode activated!")

    print("🧠 Loading Creed's consciousness...")
    creed_ai = CreedBrattonAI()
    print("✅ Creed is ready!")
    print("🔌 MCP Server will be available for Claude Desktop and other MCP clients!")

    # Custom CSS for that authentic Creed vibe
    custom_css = """
    .gradio-container {
        background: linear-gradient(135deg, #667eea 0%, #764ba2 100%);
        font-family: 'Comic Sans MS', cursive;
    }
    .chatbot {
        background: rgba(255, 255, 255, 0.9);
        border-radius: 15px;
        box-shadow: 0 4px 6px rgba(0, 0, 0, 0.1);
    }
    .title {
        text-align: center;
        color: #333;
        text-shadow: 2px 2px 4px rgba(0,0,0,0.3);
    }
    """

    # Create the Gradio interface with MCP tools
    with gr.Blocks(
        title="🎸 Creed Bratton AI - MCP Edition",
        css=custom_css,
        fill_height=True,
        theme=gr.themes.Soft()
    ) as demo:

        # Header
        gr.Markdown(
            """
            # 🎸 Creed Bratton AI - MCP Edition
            ## *Now Available as an MCP Tool for Claude Desktop!*

            This Creed AI can be used by Claude Desktop and other MCP clients as a tool.
            You can also chat with Creed directly below.

            *"Sometimes a guy's gotta ride the bull, am I right?"*
            """,
            elem_classes="title"
        )

        # MCP Status indicator with copy-pastable client configuration
        gr.Markdown(
            """
            ### 🔌 MCP Status
            This app is running as an MCP server! Add it to your Claude Desktop config:
            ```json
            {
              "mcpServers": {
                "creed": {
                  "command": "npx",
                  "args": ["mcp-remote", "http://localhost:7860/gradio_api/mcp/sse"]
                }
              }
            }
            ```
            """,
            elem_classes="title"
        )

        # Main chat interface
        # NOTE(review): avatar_images normally takes image file paths/URLs;
        # an emoji string here may not render as an avatar — confirm against
        # the installed gradio version.
        chatbot = gr.Chatbot(
            type='messages',
            height=400,
            show_copy_button=True,
            show_share_button=False,
            avatar_images=[None, "🎸"],
            bubble_full_width=False,
            show_label=False,
            container=True,
            scale=1,
            placeholder="🎸 *Creed is tuning his guitar and thinking deep thoughts...*"
        )

        # Input textbox shared with the ChatInterface below
        msg = gr.Textbox(
            placeholder="Ask Creed anything... Claude Desktop can use him as a tool! 🎸",
            container=False,
            scale=7,
            submit_btn=True,
            stop_btn=True
        )

        # MCP Tool interfaces (these become available to Claude)
        with gr.Row():
            with gr.Column():
                gr.Markdown("### 🛠️ MCP Tools (Available to Claude)")
                wisdom_input = gr.Textbox(label="Topic for Wisdom", placeholder="life, business, relationships, career")
                wisdom_output = gr.Textbox(label="Creed's Wisdom", interactive=False)
                wisdom_btn = gr.Button("Get Creed's Wisdom")

            with gr.Column():
                story_mood = gr.Dropdown(
                    choices=["mysterious", "crazy", "profound", "random"],
                    label="Story Mood",
                    value="mysterious"
                )
                story_output = gr.Textbox(label="Creed's Story", interactive=False)
                story_btn = gr.Button("Get Creed Story")

        with gr.Row():
            business_situation = gr.Textbox(label="Business Situation", placeholder="Describe your business challenge")
            business_output = gr.Textbox(label="Creed's Business Advice", interactive=False)
            business_btn = gr.Button("Get Business Advice")

        # Wire up the MCP tool functions; with mcp_server=True these event
        # handlers are what gradio exposes through the MCP schema.
        wisdom_btn.click(creed_wisdom_tool, inputs=[wisdom_input], outputs=[wisdom_output])
        story_btn.click(creed_story_generator, inputs=[story_mood], outputs=[story_output])
        business_btn.click(creed_business_advice, inputs=[business_situation], outputs=[business_output])

        # Set up the chat interface around the existing chatbot/textbox
        # NOTE(review): with type='messages' the history passed to
        # generate_response is dict-format, not List[Tuple[str, str]] as
        # annotated — harmless only because history is ignored; confirm.
        chat_interface = gr.ChatInterface(
            fn=creed_ai.generate_response,
            type='messages',
            chatbot=chatbot,
            textbox=msg,
            title=None,
            description=None,
        )

        # Add like/dislike functionality (logs feedback to stdout)
        chatbot.like(creed_ai.handle_like, None, None)

        # Footer
        gr.Markdown(
            """
            ---
            *🎸 MCP-Enabled Creed Bratton AI - "If I can't scuba, then what's this all been about?"*

            **View MCP Schema:** [/gradio_api/mcp/schema](/gradio_api/mcp/schema)
            """,
            elem_classes="title"
        )

    # Launch with MCP support enabled!
    print("🚀 Launching Creed Bratton AI with MCP Server...")

    # NOTE(review): inbrowser=True only matters for local runs; on a hosted
    # Space it has no effect — confirm intended deployment target.
    demo.launch(
        ssr_mode=False,  # Disable SSR to prevent scroll errors
        mcp_server=True,  # 🔥 ENABLE MCP SERVER! 🔥
        server_name="0.0.0.0",
        server_port=7860,
        share=False,
        inbrowser=True,
        show_error=True,
        quiet=False
    )
|
| 336 |
|
| 337 |
if __name__ == "__main__":
    # Make sure MCP support is present before launching.
    try:
        import mcp
        print("✅ MCP support detected!")
    except ImportError:
        print("⚠️ Installing MCP support...")
        # Bug fix: `os.system("pip install ...")` runs whatever `pip` is
        # first on PATH, which may belong to a different interpreter than
        # the one executing this script. Install into *this* interpreter's
        # environment, with an argument list (no shell string).
        import subprocess
        import sys
        subprocess.check_call([sys.executable, "-m", "pip", "install", "gradio[mcp]"])

    main()
|