File size: 3,401 Bytes
698506e
5a8de81
698506e
 
 
 
 
5a8de81
698506e
 
 
5a8de81
698506e
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
# app.py
import gradio as gr
import requests
from datetime import datetime
import json
import os
import uuid

# Hugging Face API token
# NOTE(review): hardcoded credential placeholder — prefer reading it from the
# environment (e.g. os.environ["HF_API_TOKEN"]) so a real token never lands
# in version control.
HF_API_TOKEN = "YOUR_HF_API_TOKEN"
headers = {"Authorization": f"Bearer {HF_API_TOKEN}"}

# Folder to store per-user conversation histories
# (created eagerly at import time so later open() calls cannot fail on a
# missing directory)
HISTORY_FOLDER = "histories"
os.makedirs(HISTORY_FOLDER, exist_ok=True)

# Available models
# Maps the display name shown in the Gradio dropdown to the model id used
# in the Inference API request URL.
MODELS = {
    "GPT-2": "gpt2",
    "GPT-J": "EleutherAI/gpt-j-6B",
    "GPT-NeoX": "EleutherAI/gpt-neox-20b"
}

def load_user_history(session_id, folder=None):
    """Load the saved conversation history for *session_id*.

    Args:
        session_id: Unique identifier of the chat session; used as the
            JSON filename stem.
        folder: Directory holding the history files. Defaults to the
            module-level ``HISTORY_FOLDER`` (kept as a runtime default so
            existing single-argument callers are unaffected).

    Returns:
        The previously saved list of "User: ..." / "AI: ..." strings, or
        an empty list when no readable history exists.
    """
    if folder is None:
        folder = HISTORY_FOLDER
    path = os.path.join(folder, f"{session_id}.json")
    try:
        # Explicit encoding so histories round-trip identically on all
        # platforms (Windows defaults to a legacy codec otherwise).
        with open(path, "r", encoding="utf-8") as f:
            return json.load(f)
    except FileNotFoundError:
        return []
    except json.JSONDecodeError:
        # A corrupt or partially-written file should not crash the app;
        # treat it as "no history" and start fresh.
        return []

def save_user_history(session_id, history, folder=None):
    """Persist *history* as pretty-printed JSON for *session_id*.

    Args:
        session_id: Unique identifier of the chat session; used as the
            JSON filename stem.
        history: List of "User: ..." / "AI: ..." strings to save.
        folder: Directory to write into. Defaults to the module-level
            ``HISTORY_FOLDER`` (runtime default keeps two-argument
            callers working unchanged).
    """
    if folder is None:
        folder = HISTORY_FOLDER
    path = os.path.join(folder, f"{session_id}.json")
    # Explicit UTF-8 plus ensure_ascii=False keeps non-ASCII chat text
    # human-readable in the stored file; json.load reads it back the same.
    with open(path, "w", encoding="utf-8") as f:
        json.dump(history, f, indent=2, ensure_ascii=False)
def ai_assistant(user_text, uploaded_file, model_name, session_id=None):
    """Answer *user_text* with the selected Hugging Face model, keeping a
    per-session conversation memory on disk.

    Args:
        user_text: The user's message.
        uploaded_file: Optional file whose text is appended to the prompt.
            Depending on the Gradio version this is a filepath string or a
            file-like object; both are handled.
        model_name: Display name key into MODELS; unknown names fall back
            to "gpt2".
        session_id: Existing session id, or blank/None to start a new one.

    Returns:
        Tuple of (display text with timestamp, word-count string,
        session_id) — session_id is echoed back so returning users can
        reuse it.
    """
    # Gradio passes "" (not None) when the Session ID textbox is left
    # blank, so treat any falsy value as "start a new session".
    if not session_id:
        session_id = str(uuid.uuid4())

    # Load this session's conversation
    conversation_history = load_user_history(session_id)

    # Fold optional file content into the prompt.
    if uploaded_file:
        if isinstance(uploaded_file, str):
            # Gradio 4 gr.File yields a filepath string by default.
            with open(uploaded_file, "r", encoding="utf-8") as fh:
                file_content = fh.read()
        else:
            # Older Gradio versions yield a (binary) file-like object.
            raw = uploaded_file.read()
            file_content = raw.decode("utf-8") if isinstance(raw, bytes) else raw
        user_text = f"{user_text}\n\nFile content:\n{file_content}"

    # Append user input
    conversation_history.append(f"User: {user_text}")

    # Prepare prompt with conversation history
    prompt = "\n".join(conversation_history) + "\nAI:"

    # Call Hugging Face Inference API
    payload = {"inputs": prompt}
    model_id = MODELS.get(model_name, "gpt2")
    try:
        response = requests.post(
            f"https://api-inference.huggingface.co/models/{model_id}",
            headers=headers,
            json=payload,
            timeout=60,  # never hang the UI indefinitely on a stalled API
        )
        if response.status_code == 200:
            result = response.json()
            ai_text = result[0]["generated_text"] if isinstance(result, list) else str(result)
        else:
            ai_text = f"⚠️ Error: {response.status_code} - {response.text}"
    except requests.RequestException as exc:
        # Network failures (DNS, timeout, connection reset) surface as a
        # readable message instead of crashing the handler.
        ai_text = f"⚠️ Error: request failed - {exc}"

    # Append AI response
    conversation_history.append(f"AI: {ai_text}")

    # Save updated history for this session
    save_user_history(session_id, conversation_history)

    # Add timestamp and word count
    timestamp = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    word_count = len(ai_text.split())
    display_text = f"{ai_text}\n\n🕒 Timestamp: {timestamp}"

    return display_text, f"Word count: {word_count}", session_id

# Gradio Interface
demo = gr.Interface(
    fn=ai_assistant,
    inputs=[
        gr.Textbox(label="Enter text", placeholder="Type your message here..."),
        gr.File(label="Upload a text file (optional)"),
        gr.Dropdown(label="Select AI Model", choices=list(MODELS.keys()), value="GPT-2"),
        # NOTE: gr.Textbox in Gradio 3+ accepts no `optional` kwarg (that was
        # a 2.x `inputs.*` parameter); passing it raises TypeError at startup.
        # A blank value already signals "new session" to ai_assistant.
        gr.Textbox(label="Session ID (auto-generated)", placeholder="Leave blank for new session")
    ],
    outputs=[
        gr.Markdown(label="AI Response"),
        gr.Textbox(label="Word Count"),
        gr.Textbox(label="Session ID")
    ],
    title="Session-Based Stateful GPT Assistant",
    description=(
        "Chat with an AI assistant with automatic session tracking. "
        "Each session has its own conversation memory. Upload files, choose models, "
        "and see timestamps & word counts. Session ID is returned for returning users."
    )
)

if __name__ == "__main__":
    demo.launch()