# NOTE: removed non-code residue copied from a web code viewer
# (file-size banner, git commit hashes, and a line-number gutter).
from gpt_index import SimpleDirectoryReader, GPTListIndex, GPTSimpleVectorIndex, LLMPredictor, PromptHelper
from langchain.chat_models import ChatOpenAI
import gradio as gr
import sys
import os
 
# SECURITY: never hard-code an API key in source — a key committed to a repo
# must be treated as leaked and revoked. Require the key via the environment.
if not os.environ.get("OPENAI_API_KEY"):
    print("Warning: OPENAI_API_KEY is not set; OpenAI API calls will fail.", file=sys.stderr)

def construct_index(directory_path):
    """Build a GPT vector index over every document under *directory_path*.

    The finished index is persisted to 'index.json' in the current working
    directory and also returned to the caller.
    """
    # Prompt-sizing knobs for the underlying LLM.
    input_limit = 4096
    output_tokens = 512
    overlap = 20
    chunk_cap = 600

    helper = PromptHelper(input_limit, output_tokens, overlap, chunk_size_limit=chunk_cap)

    predictor = LLMPredictor(
        llm=ChatOpenAI(temperature=0.7, model_name="gpt-3.5-turbo", max_tokens=output_tokens)
    )

    docs = SimpleDirectoryReader(directory_path).load_data()

    vector_index = GPTSimpleVectorIndex(docs, llm_predictor=predictor, prompt_helper=helper)
    vector_index.save_to_disk('index.json')

    return vector_index

def chatbot(input_text):
    """Answer *input_text* using the persisted index, in an insurance-expert persona.

    Appends a fixed instruction prompt to the user's question, reloads the
    index from 'index.json' on every call, and returns the raw response text.
    """
    predetermined_text = "I want you to take the statement at the start of this query and answer it using  information contained in documents in the 'docs' directory.  I want you to answer as a highly experienced insurance industry expert."
    query = f"{input_text}{predetermined_text}"
    stored_index = GPTSimpleVectorIndex.load_from_disk('index.json')
    result = stored_index.query(query, response_mode="compact")
    return result.response



# Wire up the Gradio UI, build the document index once at startup, then serve.
iface = gr.Interface(
    fn=chatbot,
    inputs=gr.components.Textbox(lines=7, label="What would you like to know about insurance?"),
    outputs="text",
    title="AI Loss Adjuster",
)

index = construct_index("docs")
iface.launch()