File size: 2,508 Bytes
131b251
 
 
 
 
 
 
 
1fe02e8
 
 
 
 
 
 
 
4323e44
1fe02e8
 
 
 
061e71d
 
1fe02e8
 
 
 
7714696
1fe02e8
 
 
 
 
 
 
 
 
 
 
e98f1a2
671a6be
1fe02e8
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
061e71d
 
 
 
 
1fe02e8
 
061e71d
91b3676
061e71d
 
 
 
 
1fe02e8
 
7714696
3172412
73d97cc
0c42ee1
7714696
 
 
 
 
3e3bd02
 
 
1fe02e8
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
import subprocess
import sys

def install(package):
    """Install *package* into the current interpreter's environment via pip.

    Uses ``sys.executable -m pip`` so the package lands in the same
    environment that is running this script.

    Raises:
        subprocess.CalledProcessError: if pip exits with a non-zero status.
    """
    subprocess.check_call([sys.executable, "-m", "pip", "install", package])

# The legacy 0.x SDK is required below (openai.ChatCompletion was removed in
# 1.x). Only hit the network when the installed version does not match, so
# normal launches start fast and work offline.
try:
    from importlib import metadata as _metadata
    _openai_version = _metadata.version("openai")
except Exception:  # not installed (or metadata unreadable) -> install it
    _openai_version = None
if not (_openai_version or "").startswith("0.28"):
    install("openai==0.28")

import json
from pathlib import Path
import gradio as gr
import os
import shutil
import openai

# API key is read from the environment so it never lives in source control.
api_key = os.getenv("api_key")

# Configure the (legacy 0.x) OpenAI SDK with the key.
openai.api_key = api_key

# Most recently uploaded project description; populated by upload_file()
# and read by generate_response(). Empty until a file is uploaded.
project_data = {}

def ask_chatbot(query, chat_history, project_data):
    """Answer a project-management question with the OpenAI chat API.

    Args:
        query: The user's latest question.
        chat_history: Prior (question, answer) pairs. Only the most recent
            9 turns are kept so the prompt stays bounded.
        project_data: Parsed project JSON the assistant grounds answers in.

    Returns:
        The assistant's reply text, stripped of surrounding whitespace.

    Raises:
        openai.error.OpenAIError: propagated from the API call on
            network/auth/quota failures.
    """
    # Serialize the project data so it can be injected as a system message.
    project_data_str = json.dumps(project_data, indent=2)

    # Keep only the most recent turns. The previous version popped a single
    # element, which left the history over-long whenever more than 10 entries
    # were passed in, and mutated the caller's list; slicing fixes both.
    recent_history = chat_history[-9:] if len(chat_history) >= 10 else chat_history

    # Flatten the dialogue into one prompt string ending with the new query.
    turns = [
        f"\nUser: {question}\nAssistant: {answer}"
        for question, answer in recent_history
    ]
    query_with_history = "".join(turns) + f"\nUser: {query}"

    # Define the messages
    messages = [
        {"role": "system", "content": "You are a virtual project management assistant. \
                                        Analyzing the given project information for a website redesign project, you have to answer project managers' questions.:"},
        {"role": "system", "content": project_data_str},
        {"role": "user", "content": query_with_history},
    ]

    # Legacy (0.28) SDK call; max_tokens bounds the length of the reply.
    response = openai.ChatCompletion.create(
        model="gpt-3.5-turbo",
        messages=messages,
        max_tokens=512,
    )

    # Extract the response text from the first (only) choice.
    answer = response.choices[0].message['content'].strip()

    return answer

def generate_response(message, history):
    """Gradio ChatInterface callback: answer *message* given chat *history*.

    Delegates to ask_chatbot with the module-level project_data, which is
    populated by upload_file() and stays an empty dict until a project file
    has been uploaded.
    """
    return ask_chatbot(message, history, project_data)

def upload_file(data_file):
    """Load an uploaded JSON project file into the module-level project_data.

    Args:
        data_file: Gradio upload object exposing the temp-file path via
            its ``.name`` attribute.

    Raises:
        json.JSONDecodeError: if the uploaded file is not valid JSON
            (the global is left untouched in that case).
    """
    global project_data
    with open(data_file.name, "r") as f:
        data = json.load(f)
    # Notify the user only after the file parsed successfully.
    gr.Info("Project file Uploaded. You can now query the document")
    project_data = data

# Build the web UI: a chat panel wired to generate_response, plus an
# upload button that feeds upload_file with the project JSON.
with gr.Blocks() as demo:
    gr.ChatInterface(
        generate_response,
        chatbot=gr.Chatbot(height=500),
        title="ProManage",
        description="Virtual Project Management Assistant",
        theme="soft",
        undo_btn="Delete Previous",  # NOTE(review): undo_btn/clear_btn exist only in older gradio releases — confirm the pinned version supports them
        clear_btn="Clear",
    )
    with gr.Column():
        # Single-file upload; the button itself is passed as the event input
        # so upload_file receives the uploaded file object.
        u = gr.UploadButton("Upload a file", file_count="single")
        u.upload(upload_file, u)

demo.launch()