stephenmccartney1234 committed on
Commit
55337be
·
verified ·
1 Parent(s): 1aebd33

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +33 -44
app.py CHANGED
@@ -1,50 +1,39 @@
1
- from gpt_index import SimpleDirectoryReader, GPTSimpleVectorIndex, LLMPredictor, PromptHelper
2
- from langchain.chat_models import ChatOpenAI
3
  import gradio as gr
 
4
  import os
5
 
6
- # Set your OpenAI API key here
7
  os.environ["OPENAI_API_KEY"] = os.getenv("OPENAI_API_KEY")
8
 
9
- def construct_index(directory_path):
10
- max_input_size = 4096
11
- num_outputs = 512
12
- max_chunk_overlap = 20
13
- chunk_size_limit = 600
14
-
15
- # Initialize the prompt helper
16
- prompt_helper = PromptHelper(max_input_size, num_outputs, max_chunk_overlap, chunk_size_limit=chunk_size_limit)
17
-
18
- # Initialize the LLM predictor with an updated model name if available
19
- #llm_predictor = LLMPredictor(llm=ChatOpenAI(temperature=0.7, model_name="gpt-4", max_tokens=num_outputs)) # Updated to GPT-4 or latest available model
20
- #llm_predictor = LLMPredictor(llm=ChatOpenAI(temperature=0.7, model_name="gpt-3.5-turbo", max_tokens=num_outputs))
21
- llm_predictor = LLMPredictor(llm=ChatOpenAI(temperature=0.7, model_name="gpt-4", max_tokens=num_outputs))
22
-
23
- # Load documents from the directory
24
- documents = SimpleDirectoryReader(directory_path).load_data()
25
 
26
- # Create and populate the index
27
- index = GPTSimpleVectorIndex(documents, llm_predictor=llm_predictor, prompt_helper=prompt_helper)
28
- index.save_to_disk('index.json')
29
-
30
- return index
31
-
32
- def chatbot(input_text):
33
- # Load the index from disk
34
- index = GPTSimpleVectorIndex.load_from_disk('index.json')
35
-
36
- # Query the index and return the response
37
- response = index.query(input_text, response_mode="compact")
38
- return response.response
39
-
40
- # Set up the Gradio interface
41
- iface = gr.Interface(fn=chatbot,
42
- inputs=gr.components.Textbox(lines=7, label="Enter your text"),
43
- outputs="text",
44
- title="Custom-trained AI Chatbot")
45
-
46
- # Construct the index with your documents directory
47
- index = construct_index("docs")
48
-
49
- # Launch the Gradio interface
50
- iface.launch(share=False)
 
 
 
1
  import gradio as gr
2
+ import openai
3
  import os
4
 
5
+ # Ensure the OPENAI_API_KEY environment variable is set correctly
6
  os.environ["OPENAI_API_KEY"] = os.getenv("OPENAI_API_KEY")
7
 
8
def generate_text(prompt):
    """Answer an insurance-related question using the OpenAI API.

    Parameters
    ----------
    prompt : str
        The user's insurance question or statement.

    Returns
    -------
    str
        The model's answer with surrounding whitespace stripped.

    Raises
    ------
    openai.error.OpenAIError
        If the API call fails (bad key, network error, rate limit, ...).
    """
    # Append the steering instructions after the user's text, as before.
    input_text = (prompt + " You are an expert with many years of experience in the insurance loss adjusting business. "
                  "Can you give your expert opinion on the question or statement made at the start of this prompt? "
                  "Answer it in terms of the laws, regulations, and terms of the insurance industry. "
                  "Make it accurate, but written in a simple and clear style. "
                  "Please provide references or insurance law precedents where possible.")

    # BUG FIX: gpt-3.5-turbo is a chat model and is served only by the
    # Chat Completions endpoint. openai.Completion.create (the legacy
    # completions endpoint) rejects chat models with an InvalidRequestError,
    # so the old call could never succeed.
    response = openai.ChatCompletion.create(
        model="gpt-3.5-turbo",  # update to a newer chat model as desired
        messages=[{"role": "user", "content": input_text}],
        max_tokens=1024,
        n=1,
        stop=None,
        temperature=0.7,
    )

    # Chat responses carry the text under message.content, not choices[i].text.
    return response.choices[0].message["content"].strip()
28
+
29
# Assemble the web UI: one question box in, one answer box out.
question_box = gr.Textbox(lines=5, label="What do you wish to know about insurance?")
answer_box = gr.Textbox(label="Generated text:")

iface = gr.Interface(
    fn=generate_text,
    inputs=question_box,
    outputs=answer_box,
    title="AI Adjuster",
    description="AI tool to help you understand and answer any insurance-related queries.",
)

# Start the Gradio server.
iface.launch()