louisedrumm committed on
Commit
16bcee5
·
1 Parent(s): a6f1593

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +42 -11
app.py CHANGED
@@ -1,15 +1,46 @@
 
 
 
1
  import gradio as gr
2
- from transformers import pipeline
 
 
3
 
4
- pipeline = pipeline(task="image-classification", model="julien-c/hotdog-not-hotdog")
5
 
6
- def predict(image):
7
- predictions = pipeline(image)
8
- return {p["label"]: p["score"] for p in predictions}
9
 
10
- gr.Interface(
11
- predict,
12
- inputs=gr.inputs.Image(label="Upload hot dog candidate", type="filepath"),
13
- outputs=gr.outputs.Label(num_top_classes=2),
14
- title="Hot Dog? Or Not?",
15
- ).launch()
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from gpt_index import SimpleDirectoryReader, GPTListIndex, GPTSimpleVectorIndex, LLMPredictor, PromptHelper
2
+ #from langchain.schema import BaseLanguageModel
3
+ from langchain.chat_models import ChatOpenAI
4
  import gradio as gr
5
+ import sys
6
+ import os
7
+ import openai
8
 
9
# SECURITY: the original code hard-coded a live OpenAI API key on this line.
# A key committed to source control is public and must be revoked immediately.
# Supply the key through the environment instead (e.g. a Hugging Face Spaces
# secret named OPENAI_API_KEY); warn rather than crash so the UI still loads.
if not os.environ.get("OPENAI_API_KEY"):
    print("WARNING: OPENAI_API_KEY is not set; OpenAI calls will fail.", file=sys.stderr)

# System prompt for a chat-completion conversation.
# NOTE(review): `messages` is not referenced anywhere in the visible code —
# confirm whether it is dead or consumed elsewhere before removing it.
messages = [
    {"role": "system", "content": "You try to be secretive but are terrible at it."},
]
14
 
15
def construct_index(directory_path):
    """Build a vector index over every document in *directory_path*.

    The index is persisted to ``index.json`` (which `chatbot` later loads)
    and also returned so the caller can use it immediately.
    """
    # Prompt-budget / chunking parameters for the LLM.
    input_budget = 4096
    output_tokens = 512
    overlap_tokens = 20
    chunk_limit = 600

    helper = PromptHelper(input_budget, output_tokens, overlap_tokens,
                          chunk_size_limit=chunk_limit)
    predictor = LLMPredictor(
        llm=ChatOpenAI(temperature=0.7, model_name="gpt-3.5-turbo",
                       max_tokens=output_tokens)
    )

    docs = SimpleDirectoryReader(directory_path).load_data()
    index = GPTSimpleVectorIndex(docs, llm_predictor=predictor,
                                 prompt_helper=helper)
    index.save_to_disk('index.json')
    return index
32
+
33
+
34
+
35
def chatbot(input_text):
    """Answer *input_text* by querying the vector index saved in ``index.json``.

    Returns the response text produced by the index query.
    """
    # Reload from disk on every call; construct_index must have run first.
    vector_index = GPTSimpleVectorIndex.load_from_disk('index.json')
    result = vector_index.query(input_text, response_mode="compact")
    return result.response
39
+
40
# Gradio UI: one multi-line textbox in, plain text out, backed by `chatbot`.
iface = gr.Interface(
    fn=chatbot,
    inputs=gr.components.Textbox(lines=7, label="Hello, I'm a wiG, what would you like to know?"),
    outputs="text",
    title="wiGs explained custom-trained AI Chatbot",
)

# Build (and persist) the index from the bundled "docs" folder, then serve.
index = construct_index("docs")
iface.launch(share=True)