|
|
import asyncio
import logging
import random

from transformers import pipeline, Conversation
|
|
|
|
|
logger = logging.getLogger(__name__) |
|
|
chatbot_pipeline = None |
|
|
conversation_history = {} |
|
|
|
|
|
def load_chatbot_model():
    """Load the DialoGPT conversational pipeline into the module global.

    Sets ``chatbot_pipeline`` on success; resets it to ``None`` on any
    failure so callers can detect the miss and use fallback responses.
    """
    global chatbot_pipeline

    try:
        logger.info("Loading DialoGPT chatbot model...")
        loaded = pipeline(
            "conversational",
            model="microsoft/DialoGPT-medium",
            device="cpu",
        )
    except Exception as e:
        logger.error(f"β Failed to load chatbot model: {str(e)}")
        chatbot_pipeline = None
    else:
        chatbot_pipeline = loaded
        logger.info("β Chatbot model loaded successfully")
|
|
|
|
|
async def get_chatbot_response(user_text: str, user_id: str = "default") -> str:
    """
    Generate a chatbot response for *user_text* using DialoGPT.

    Maintains one ``Conversation`` per user in the module-level
    ``conversation_history`` dict, keyed by *user_id*, so context
    accumulates across calls.

    Returns the model's latest reply, or a canned fallback when the
    model is unavailable, produces empty text, or raises.
    """
    global chatbot_pipeline, conversation_history

    try:
        # Lazy-load the model on first use; fall back if loading failed.
        if chatbot_pipeline is None:
            load_chatbot_model()
            if chatbot_pipeline is None:
                return get_fallback_response(user_text)

        logger.info(f"Chatbot: Processing '{user_text}'")

        # First message from this user: start a fresh conversation.
        if user_id not in conversation_history:
            conversation_history[user_id] = Conversation()

        conversation = conversation_history[user_id]
        conversation.add_user_input(user_text)

        # BUG FIX: the pipeline call is synchronous and CPU-heavy; calling
        # it directly would block the event loop for the whole inference.
        # Run it in a worker thread instead.
        response = await asyncio.to_thread(chatbot_pipeline, conversation)

        bot_response = response.generated_responses[-1].strip()

        # Model can emit an empty string; substitute a fallback reply.
        if not bot_response:
            bot_response = get_fallback_response(user_text)

        logger.info(f"β Chatbot Response: '{bot_response}'")
        return bot_response

    except Exception as e:
        # Best-effort contract: never propagate — always return *some* text.
        logger.error(f"β Chatbot Error: {str(e)}")
        return get_fallback_response(user_text)
|
|
|
|
|
def get_fallback_response(user_text: str) -> str:
    """Return a randomly chosen canned reply referencing *user_text*.

    Used whenever the DialoGPT model is unavailable or yields no text.
    """
    options = (
        f"I understand: '{user_text}'. How can I assist?",
        f"Interesting point about '{user_text}'. Tell me more?",
        f"Regarding '{user_text}', what would you like to know?",
        "I'm listening. Please continue.",
        f"That's a great question about '{user_text}'!",
    )
    return random.choice(options)