File size: 4,021 Bytes
b46567f
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
import asyncio
import json
import os

import gradio as gr
import requests

# --- Gemini API Configuration ---
# The API key will be automatically provided by the Canvas environment at runtime
# if left as an empty string. DO NOT hardcode your API key here.
API_KEY = "" # Leave as empty string for Canvas environment
API_URL = "https://generativelanguage.googleapis.com/v1beta/models/gemini-2.0-flash:generateContent"

# --- Function to call the Gemini API ---
async def call_gemini_api(prompt: str) -> str:
    """
    Calls the Gemini API with the given prompt and returns the generated text.
    """
    headers = {
        'Content-Type': 'application/json',
    }
    payload = {
        "contents": [{"role": "user", "parts": [{"text": prompt}]}],
    }

    # Append API key to URL if available, otherwise it's handled by Canvas runtime
    full_api_url = f"{API_URL}?key={API_KEY}" if API_KEY else API_URL

    try:
        # Use requests.post for synchronous call, or aiohttp for async if needed
        # For Gradio, direct requests.post within the function is usually fine.
        response = requests.post(full_api_url, headers=headers, data=json.dumps(payload))
        response.raise_for_status() # Raise an exception for HTTP errors
        
        result = response.json()

        if result.get("candidates") and len(result["candidates"]) > 0 and \
           result["candidates"][0].get("content") and \
           result["candidates"][0]["content"].get("parts") and \
           len(result["candidates"][0]["content"]["parts"]) > 0:
            return result["candidates"][0]["content"]["parts"][0]["text"]
        else:
            return "No content generated by the model."
    except requests.exceptions.RequestException as e:
        return f"API Call Error: {e}"
    except json.JSONDecodeError:
        return f"API Response Error: Could not decode JSON. Response: {response.text}"
    except Exception as e:
        return f"An unexpected error occurred: {e}"

# --- Gradio Interface Function ---
async def analyze_chat_conversation(chat_text: str, analysis_task: str) -> str:
    """
    Run the selected analysis task over a pasted chat transcript via the LLM.

    Returns the model's response text, or a user-facing message when the
    input is empty or the task name is unrecognized.
    """
    if not chat_text.strip():
        return "Please enter a chat conversation to analyze."

    # Map each task name to its fully rendered prompt; an unknown task
    # simply misses the lookup below.
    prompt_templates = {
        "Summarize": (
            f"Summarize the following chat conversation:\n\n{chat_text}\n\nSummary:"
        ),
        "Sentiment Analysis": (
            f"Analyze the overall sentiment of the following chat conversation (e.g., positive, negative, neutral, mixed). Explain your reasoning briefly:\n\n{chat_text}\n\nSentiment Analysis:"
        ),
        "Extract Key Points & Action Items": (
            f"Extract the main discussion points and any explicit action items from the following chat conversation. Present them as a bulleted list:\n\n{chat_text}\n\nKey Points and Action Items:"
        ),
    }

    prompt = prompt_templates.get(analysis_task)
    if prompt is None:
        return "Invalid analysis task selected."

    # Delegate the actual model call to the shared API helper.
    return await call_gemini_api(prompt)

# --- Gradio Interface Definition ---
# --- Gradio Interface Definition ---
# Components are built up-front so the Interface call stays readable.
conversation_box = gr.Textbox(
    lines=10,
    label="Paste Chat Conversation Here",
    placeholder="e.g., 'Alice: Let's meet tomorrow. Bob: Sure, 10 AM? Alice: Yes, and please bring the report. Bob: Will do.'",
)
task_dropdown = gr.Dropdown(
    ["Summarize", "Sentiment Analysis", "Extract Key Points & Action Items"],
    label="Select Analysis Task",
    value="Summarize",
)
result_box = gr.Textbox(label="Analysis Result", lines=15)

demo = gr.Interface(
    fn=analyze_chat_conversation,
    inputs=[conversation_box, task_dropdown],
    outputs=result_box,
    title="💬 Chat Conversation Analyzer (Powered by Gemini)",
    description="Paste your chat conversation, select an analysis task, and get insights from an AI.",
)

# --- Launch the Gradio App ---
# --- Launch the Gradio App ---
# Entry point: start the Gradio server when run as a script (not on import).
if __name__ == "__main__":
    # For local testing, use demo.launch()
    # For Hugging Face Spaces, ensure `gradio` and `requests` are in requirements.txt
    demo.launch()