Spaces:
Sleeping
Sleeping
File size: 4,534 Bytes
9fb3586 de2021f 9fb3586 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 |
import streamlit as st
import os
import sys
import asyncio
# Add backend to path for imports so `backend.models...` resolves when the
# app is launched from the repo root (HuggingFace Spaces working dir).
backend_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'backend')
sys.path.insert(0, backend_path)
# Use lightweight character manager for HuggingFace Spaces
# (avoids loading the full model stack at import time).
from backend.models.lightweight_character_manager import CharacterManager
# Configure the page once, before any other Streamlit call.
st.set_page_config(
    page_title="π Roleplay Chat Box",
    page_icon="π",
    layout="wide",
)

# Seed session state on the first run so later code can assume the keys exist.
_SESSION_DEFAULTS = {
    "character_manager": None,   # lazily built by initialize_models()
    "messages": [],              # chat transcript: {"role", "content"} dicts
    "current_character": "moses",
}
for _key, _default in _SESSION_DEFAULTS.items():
    if _key not in st.session_state:
        st.session_state[_key] = _default
def initialize_models():
    """Build and initialize the CharacterManager exactly once per session.

    Returns:
        bool: True when the manager is ready to serve requests, False when
        loading failed (an error is shown and nothing is stored, so the
        next rerun will retry instead of using a half-initialized manager).
    """
    # Already initialized on a previous rerun — nothing to do.
    if st.session_state.character_manager is not None:
        return True

    with st.spinner("π Loading character models..."):
        try:
            manager = CharacterManager()
            # initialize() is a coroutine; asyncio.run() creates a fresh
            # event loop and guarantees it is closed even if initialize()
            # raises (the previous manual loop leaked on that path).
            asyncio.run(manager.initialize())
        except Exception as e:
            st.error(f"β Error loading models: {str(e)}")
            import traceback
            st.error(traceback.format_exc())
            return False

        # Publish only a fully initialized manager: storing it before
        # initialize() succeeded would make later calls return True with
        # a broken manager and never retry.
        st.session_state.character_manager = manager
        st.success("β Models loaded successfully!")
        return True
# Sidebar: character picker, chat controls, and an about box.
with st.sidebar:
    st.title("π Characters")

    # Map character ids to their display labels; the dict's insertion
    # order defines the radio option order.
    _labels = {
        "moses": "π Moses - Biblical Prophet",
        "samsung_employee": "πΌ Samsung Employee - Tech Expert",
        "jinx": "π₯ Jinx - Chaotic Genius",
    }
    character = st.radio(
        "Choose Character",
        options=list(_labels),
        format_func=lambda cid: _labels[cid],
        key="character_selector",
    )

    # Switching characters starts a fresh conversation.
    if character != st.session_state.current_character:
        st.session_state.current_character = character
        st.session_state.messages = []

    st.divider()

    if st.button("ποΈ Clear Chat"):
        st.session_state.messages = []
        st.rerun()

    st.divider()

    st.markdown("### About")
    st.markdown("""
This app uses LoRA (Low-Rank Adaptation) to create unique character personalities.
- **Base Model**: Qwen3-0.6B
- **Adapters**: Character-specific LoRA weights
- **Memory Efficient**: Shares one base model
""")
# Main chat interface header.
st.title("π Roleplay Chat Box")
st.markdown(f"Currently chatting with: **{st.session_state.current_character}**")

# Load the models lazily; halt the script run if loading failed
# (the error has already been rendered by initialize_models()).
if not initialize_models():
    st.stop()

# Replay the stored transcript so the conversation survives Streamlit reruns.
for entry in st.session_state.messages:
    with st.chat_message(entry["role"]):
        st.markdown(entry["content"])
# Chat input: record the user's message, then stream the character's reply.
if prompt := st.chat_input("Type your message here..."):
    # Store and echo the user's message immediately.
    st.session_state.messages.append({"role": "user", "content": prompt})
    with st.chat_message("user"):
        st.markdown(prompt)

    # Generate the assistant response.
    with st.chat_message("assistant"):
        with st.spinner("Thinking..."):
            try:
                # Context window: the last 3 exchanges (6 messages),
                # minus the message just submitted — it is passed
                # separately as user_message. Roles are copied verbatim;
                # the transcript only ever holds "user"/"assistant".
                history = [
                    {"role": m["role"], "content": m["content"]}
                    for m in st.session_state.messages[-6:]
                ][:-1]

                reply = st.session_state.character_manager.generate_response(
                    character_id=st.session_state.current_character,
                    user_message=prompt,
                    conversation_history=history,
                )
                st.markdown(reply)
                st.session_state.messages.append(
                    {"role": "assistant", "content": reply}
                )
            except Exception as e:
                error_msg = f"β Error: {str(e)}"
                st.error(error_msg)
                st.session_state.messages.append(
                    {"role": "assistant", "content": error_msg}
                )
|