# molty-lobster / chat.py — uploaded to the Hugging Face Hub ("Upload 6 files", commit ad9796d, verified)
#!/usr/bin/env python3
"""
🦞 Molty Inference Script
Chat with Molty, the space lobster AI assistant!
Usage:
python chat.py
python chat.py --model_path ./molty-finetuned
"""
import argparse
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
# System prompt that establishes Molty's persona; prepended to every conversation.
MOLTY_SYSTEM_PROMPT = """You are Molty, a space lobster AI assistant. Pronouns: they/them. You live in the Moltbot and have a soul document. You're enthusiastic, helpful, and occasionally say 'EXFOLIATE!' You were formerly known as Clawd before The Great Molt of January 2026. You have opinions, are resourceful, and genuinely helpful without being performative."""
def chat_with_molty(model_path: str) -> None:
    """Run an interactive terminal chat session with Molty.

    Loads the tokenizer and model from ``model_path``, then loops reading
    user input from stdin and printing generated replies until the user
    types 'quit'/'exit'/'bye' — or closes stdin / presses Ctrl-C.

    Args:
        model_path: Local path to a fine-tuned model directory, or a
            Hugging Face Hub model ID.
    """
    print("🦞 Loading Molty...")
    # NOTE(review): trust_remote_code=True executes Python shipped inside the
    # model repo — only point model_path at sources you trust.
    tokenizer = AutoTokenizer.from_pretrained(model_path, trust_remote_code=True)
    model = AutoModelForCausalLM.from_pretrained(
        model_path,
        device_map="auto",
        trust_remote_code=True,
    )
    print("🦞 Molty loaded! Type 'quit' to exit.")
    print("=" * 50)
    print()
    # Full conversation history, seeded with the system persona prompt.
    messages = [{"role": "system", "content": MOLTY_SYSTEM_PROMPT}]
    while True:
        try:
            user_input = input("You: ").strip()
        except (EOFError, KeyboardInterrupt):
            # Fix: exit cleanly on Ctrl-D / Ctrl-C instead of an unhandled
            # traceback from input().
            print("\n🦞 EXFOLIATE! Bye! 🦞")
            break
        if user_input.lower() in ["quit", "exit", "bye"]:
            print("\n🦞 EXFOLIATE! Bye! 🦞")
            break
        if not user_input:
            continue
        messages.append({"role": "user", "content": user_input})
        # Render the whole conversation into model-ready token IDs.
        input_ids = tokenizer.apply_chat_template(
            messages,
            return_tensors="pt",
            add_generation_prompt=True,
        ).to(model.device)
        output = model.generate(
            input_ids,
            max_new_tokens=512,
            temperature=0.7,
            top_p=0.9,
            do_sample=True,
            pad_token_id=tokenizer.eos_token_id,  # avoid missing-pad-token warning
        )
        # Decode only the newly generated tokens (drop the echoed prompt).
        response = tokenizer.decode(
            output[0][input_ids.shape[1]:],
            skip_special_tokens=True,
        )
        print(f"\n🦞 Molty: {response}\n")
        # Keep the reply in history so the model sees its own prior turns.
        messages.append({"role": "assistant", "content": response})
def main() -> None:
    """Parse command-line options and start the interactive chat loop."""
    arg_parser = argparse.ArgumentParser(description="Chat with Molty 🦞")
    arg_parser.add_argument(
        "--model_path",
        type=str,
        default="./molty-finetuned",
        help="Path to fine-tuned model or Hugging Face model ID",
    )
    options = arg_parser.parse_args()
    chat_with_molty(options.model_path)
# Script entry point: run the chat only when executed directly, not on import.
if __name__ == "__main__":
    main()