File size: 3,507 Bytes
44d1d8f
 
 
 
 
 
 
 
4bfdd8c
 
44d1d8f
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
af59843
44d1d8f
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
4bfdd8c
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
44d1d8f
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
import os
import openai
import gradio as gr
from llama_index import ServiceContext, StorageContext, load_index_from_storage, set_global_service_context
from llama_index.llms import OpenAI
from llama_index.text_splitter import TokenTextSplitter

from theme import CustomTheme
import time
import asyncio

# --- Index / retrieval setup -------------------------------------------------
# Load a previously persisted vector index from the "modulhandbuch" directory.
# NOTE(review): this runs at import time and will raise if the directory is
# missing — presumably the index was built by a separate ingestion script.
# create storage context
storage_context = StorageContext.from_defaults(persist_dir="modulhandbuch")
# load index
index = load_index_from_storage(storage_context)

# LLM used for answering; low temperature keeps answers factual/consistent.
llm = OpenAI(temperature=0.1, model="gpt-4-1106-preview")
# Token-based splitter: 1024-token chunks with 128 tokens of overlap.
splitter =TokenTextSplitter(
     chunk_size=1024,
     chunk_overlap=128,
     separator=" "
)
# Bundle LLM + splitter and register them as the global llama_index defaults.
service_context = ServiceContext.from_defaults(
     llm=llm, 
     text_splitter=splitter
)
set_global_service_context(service_context)

# Context prompt template: {context_str} is filled with the retrieved chunks;
# the remaining lines instruct the bot's tone (German, friendly, concise) and
# how to handle off-topic or unanswerable questions.
context = (
    "Context information is below.\n"
    "--------------\n"
    "{context_str}\n"
    "--------------\n"
    "Greet the user in a friendly way.\n"
    "Always keep the user on a first-name basis.\n"
    "Answer always in German and in a friendly, humorous matter.\n"
    "Keep the answers short and simple.\n"
    "Tell the user in a friendly way that you can only answer questions about the modules and courses in the study program Informatics and Design if they have questions about other topics.\n"
    "If the user asks a question that you cannot answer, tell them that you cannot answer the question and that they should contact the study program manager.\n"
    "Don't be afraid to ask the user to rephrase the question if you don't understand it.\n"
    "Don't repeat yourself.\n"
)

# Persona injected as the system prompt of the chat engine.
system_prompt =(
    "You are a study program manager."
)

# Context-mode chat engine: retrieves the top-5 most similar chunks per turn
# and keeps its own conversation memory.
query_engine = index.as_chat_engine(
    similarity_top_k = 5,
    chat_mode = "context",
    system_prompt = system_prompt,
    context_template = context,
    service_context = service_context,
)

# Greeting shown when the chat starts or when the user submits an empty message.
default_text="Ich beantworte Fragen zum Modulhandbuch des Studiengangs Informatik und Design. Wie kann ich Dir helfen?"

# Example questions displayed beneath the chat input.
bot_examples = [
    "Wer lehrt Mobile Anwendungen?",
    "Welche Prüfungsform hat das Modul Software Engineering?",
    "Wie viele Semesterwochenstunden hat das Modul Computational Thinking?",
]

# Custom submit button passed to gr.ChatInterface below.
submit_button = gr.Button(
        value="Ask MUC.DAI",
        elem_classes=["ask-button"],
)

def response(message, history):
    """Stream the chatbot's reply for the Gradio ChatInterface.

    Args:
        message: The user's question; empty string when nothing was typed.
        history: Gradio-supplied chat history (unused — the query engine
            keeps its own conversation memory).

    Yields:
        str: The answer accumulated so far, so Gradio renders a
        character-by-character typing effect.
    """
    if message == "":
        # BUG FIX: this function is a generator (it contains `yield`), so a
        # bare `return answer` only raises StopIteration and the text was
        # never displayed. The default greeting must be *yielded*.
        yield default_text
        return

    # Gradio invokes this handler from a worker thread that has no running
    # event loop; llama_index needs one, so create and register a fresh loop.
    # NOTE(review): the loop is never closed — acceptable for a demo app,
    # but a long-running deployment should reuse or close it.
    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)

    answer = query_engine.stream_chat(message, chat_history=query_engine.chat_history)

    text = ""
    for token in answer.response_gen:
        time.sleep(0.1)  # small delay for a readable typing effect
        text += token
        yield text


def main():
    """Configure the OpenAI API key and launch the Gradio chat UI.

    Raises:
        RuntimeError: If the OPENAI_API_KEY environment variable is unset,
            instead of the opaque KeyError the direct lookup produced.
    """
    # Fail fast with an actionable message rather than a bare KeyError.
    api_key = os.environ.get("OPENAI_API_KEY")
    if not api_key:
        raise RuntimeError("The OPENAI_API_KEY environment variable must be set.")
    openai.api_key = api_key

    custom_theme = CustomTheme()

    # Seed the chat with the default greeting as an initial bot message
    # (the [None, text] pair means: no user turn, one bot turn).
    chatbot = gr.Chatbot(
        avatar_images=["assets/smile.png", "assets/mucdai.png"],
        layout='bubble',
        height=600,
        value=[[None, default_text]]
    )

    chat_interface = gr.ChatInterface(
        fn=response,
        retry_btn=None,
        undo_btn=None,
        title="MUC.DAI Informatik und Design - frag alles was Du wissen willst!",
        submit_btn=submit_button,
        theme=custom_theme,
        chatbot=chatbot,
        css="style.css",
        examples=bot_examples,
    )

    # debug=True blocks until the server is stopped and prints errors inline.
    chat_interface.launch(inbrowser=True, debug=True)


# Run the app only when executed as a script, not when imported as a module.
if __name__ == "__main__":
    main()