File size: 2,396 Bytes
4a13628
674469e
 
4a13628
 
95cb26e
674469e
4a13628
95cb26e
 
 
3b2b211
95cb26e
674469e
 
 
95cb26e
e8aa76b
95cb26e
e8aa76b
95cb26e
 
 
4a13628
674469e
 
4a13628
674469e
95cb26e
4a13628
95cb26e
 
 
 
4a13628
674469e
4a13628
674469e
 
 
4a13628
674469e
 
 
95cb26e
674469e
 
 
95cb26e
e8aa76b
 
95cb26e
674469e
95cb26e
4a13628
 
674469e
95cb26e
 
 
674469e
 
 
 
 
 
 
95cb26e
674469e
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
import logging
from transformers import pipeline, Conversation
import random

logger = logging.getLogger(__name__)
# Lazily-initialized HuggingFace "conversational" pipeline; None until
# load_chatbot_model() succeeds (and reset to None again on load failure).
chatbot_pipeline = None
# Per-user multi-turn context: maps user_id (str) -> transformers.Conversation.
# NOTE(review): entries are never evicted, so this grows unboundedly — confirm
# whether callers expect long-lived per-user history.
conversation_history = {}

def load_chatbot_model():
    """Initialize the module-level DialoGPT conversational pipeline.

    On success, binds the pipeline to the global ``chatbot_pipeline``.
    On any failure, logs the error and leaves ``chatbot_pipeline`` as None
    so callers can fall back to canned responses.
    """
    global chatbot_pipeline
    logger.info("Loading DialoGPT chatbot model...")
    try:
        loaded = pipeline(
            "conversational",
            model="microsoft/DialoGPT-medium",
            device="cpu",  # Use "cuda" if GPU available
        )
    except Exception as e:
        logger.error(f"✗ Failed to load chatbot model: {str(e)}")
        chatbot_pipeline = None
    else:
        chatbot_pipeline = loaded
        logger.info("✓ Chatbot model loaded successfully")

async def get_chatbot_response(user_text: str, user_id: str = "default") -> str:
    """Generate a chatbot reply for *user_text* using DialoGPT.

    Maintains one ``Conversation`` per *user_id* in the module-level
    ``conversation_history`` dict so multi-turn context is preserved.
    Falls back to a canned response whenever the model is unavailable,
    produces an empty reply, or raises.

    Args:
        user_text: The raw user utterance to respond to.
        user_id: Key used to isolate conversation history between users.

    Returns:
        The model's reply, or a fallback string; never raises.
    """
    # NOTE(review): the pipeline call below is CPU-bound and blocking — it will
    # stall the event loop despite this being an async def. Consider
    # loop.run_in_executor if this runs inside a real asyncio server.
    try:
        # Lazily (re)load the model on first use; if loading fails,
        # load_chatbot_model() leaves the global as None.
        if chatbot_pipeline is None:
            load_chatbot_model()
            if chatbot_pipeline is None:
                return get_fallback_response(user_text)

        logger.info(f"Chatbot: Processing '{user_text}'")

        # One Conversation object per user keeps multi-turn context separate.
        if user_id not in conversation_history:
            conversation_history[user_id] = Conversation()

        conversation = conversation_history[user_id]
        conversation.add_user_input(user_text)

        response = chatbot_pipeline(conversation)
        bot_response = response.generated_responses[-1].strip()

        # The model can emit an empty string; answer something rather than nothing.
        if not bot_response:
            bot_response = get_fallback_response(user_text)

        logger.info(f"✓ Chatbot Response: '{bot_response}'")
        return bot_response

    except Exception as e:
        # logger.exception keeps the traceback (logger.error dropped it),
        # which is essential for diagnosing model/pipeline failures.
        logger.exception(f"✗ Chatbot Error: {str(e)}")
        return get_fallback_response(user_text)

def get_fallback_response(user_text: str) -> str:
    """Return a canned reply for *user_text*, used when the model is unavailable.

    Picks uniformly at random from a fixed set of templates so the bot
    still answers something sensible on model failure.
    """
    templates = (
        f"I understand: '{user_text}'. How can I assist?",
        f"Interesting point about '{user_text}'. Tell me more?",
        f"Regarding '{user_text}', what would you like to know?",
        "I'm listening. Please continue.",
        f"That's a great question about '{user_text}'!",
    )
    return random.choice(templates)