import gradio as gr
import json
from langchain.chains import create_extraction_chain
from langchain.chains import create_tagging_chain
from langchain.chat_models import ChatOpenAI
from langchain.prompts import ChatPromptTemplate
from langchain.schema.output_parser import StrOutputParser


def process_inputs(input_text, schema_prompt, radio_choice):
    """Route the request to the handler for the selected task.

    "Extraction" -> process_extract, "Tagging" -> process_tag; any other
    choice (i.e. "Custom Prompt") falls through to process_custom.
    """
    handlers = {
        "Extraction": process_extract,
        "Tagging": process_tag,
    }
    handler = handlers.get(radio_choice, process_custom)
    return handler(input_text, schema_prompt)


def process_extract(input_text, schema_prompt):
    """Run a LangChain extraction chain over *input_text*.

    *schema_prompt* must be a JSON string describing the extraction schema.
    Returns the chain's output pretty-printed as a JSON string, or a
    human-readable error message when the schema is not valid JSON.
    """
    try:
        schema_json = json.loads(schema_prompt)
    except json.JSONDecodeError as err:
        # Surface a readable message in the UI instead of a raw traceback.
        return f"Invalid JSON schema: {err}"
    chain = create_extraction_chain(schema_json, chat_model)
    llm_response = chain.run(input_text)
    return json.dumps(llm_response, indent=4)


def process_tag(input_text, schema_prompt):
    """Run a LangChain tagging chain over *input_text*.

    *schema_prompt* must be a JSON string describing the tagging schema.
    Returns the chain's output pretty-printed as a JSON string, or a
    human-readable error message when the schema is not valid JSON.
    """
    try:
        schema_json = json.loads(schema_prompt)
    except json.JSONDecodeError as err:
        # Surface a readable message in the UI instead of a raw traceback.
        return f"Invalid JSON schema: {err}"
    chain = create_tagging_chain(schema_json, chat_model)
    llm_response = chain.run(input_text)
    return json.dumps(llm_response, indent=4)


def process_custom(input_text, schema_prompt):
    """Treat *schema_prompt* as a chat prompt template and run it on the input.

    The template may reference the {input_text} placeholder; the model's raw
    string output is returned unchanged.
    """
    parser = StrOutputParser()
    template = ChatPromptTemplate.from_template(schema_prompt)
    pipeline = template | chat_model | parser
    return pipeline.invoke({"input_text": input_text})


# Single shared chat model used by all three handlers; temperature=0 for
# deterministic output. NOTE(review): presumably picks up OpenAI credentials
# from the environment (OPENAI_API_KEY) — confirm before deploying.
chat_model = ChatOpenAI(temperature=0, model_name='gpt-3.5-turbo')

with gr.Blocks() as demo:
    # UI layout: free-form input text, a schema-or-prompt box, a task
    # selector, and a read-only result box.
    input_text = gr.Textbox(label="Input Text")
    schema_prompt = gr.Textbox(label="Schema / Prompt")
    radio_choice = gr.Radio(["Extraction", "Tagging", "Custom Prompt"], label="Task")
    output = gr.Textbox(label="Result")
    analyze_btn = gr.Button("Analyze")
    # Wire the button to the dispatcher; its return value lands in `output`.
    analyze_btn.click(fn=process_inputs, inputs=[input_text, schema_prompt, radio_choice], outputs=output,
                      api_name="process_inputs")

if __name__ == "__main__":
    # share=True publishes a temporary public Gradio link; debug=True keeps
    # the process attached so server errors print to the console.
    demo.launch(show_api=False, debug=True, share=True)