File size: 1,346 Bytes
d607c88
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
import gradio as gr
import openai
import urllib.parse
import tkinter as tk
from tkinter import messagebox

openai.api_key = "YOUR_OPENAI_API_KEY"

def raise_issue(e, model, prompt):
    """Print a pre-filled GitHub issue link so the user can report a failure.

    Args:
        e: The exception that was raised.
        model: Name of the model that failed.
        prompt: The prompt that triggered the failure.

    Returns:
        None. The issue URL is printed to stdout for the user to follow.
    """
    quote = urllib.parse.quote
    # URL-encode the issue title and body so they survive as query params.
    title = quote("[bug] Hosted Gorilla: <Issue>")
    body = quote(f"Exception: {e}\nFailed model: {model}, for prompt: {prompt}")
    url = (
        "https://github.com/ShishirPatil/gorilla/issues/new"
        "?assignees=&labels=hosted-gorilla&projects=&template=hosted-gorilla-.md"
        f"&title={title}&body={body}"
    )
    print(f"An exception has occurred: {e}\nPlease raise an issue here: {url}")

def get_gorilla_response(prompt, model):
    """Query the configured OpenAI-compatible endpoint with *prompt*.

    Args:
        prompt: User-supplied query text.
        model: Model identifier, e.g. "gorilla-7b-hf-v1".

    Returns:
        The model's response text, or a short error message if the request
        failed (previously this returned None, which rendered as an empty
        Gradio Textbox and gave the user no feedback).
    """
    try:
        completion = openai.ChatCompletion.create(
            model=model,
            messages=[{"role": "user", "content": prompt}]
        )
    except Exception as e:
        # Best-effort: point the user at a pre-filled GitHub issue, then
        # surface the failure in the UI instead of silently returning None.
        raise_issue(e, model, prompt)
        return f"An error occurred: {e}. Please raise an issue on GitHub."
    return completion.choices[0].message.content

# Build the Gradio UI: a free-text prompt plus a model selector.
# NOTE: gr.inputs.Dropdown / gr.outputs.Textbox are the Gradio 2.x
# namespaces, which were removed in Gradio 3+; the top-level component
# classes (gr.Dropdown, gr.Textbox) are the supported replacement.
iface = gr.Interface(
    fn=get_gorilla_response,
    inputs=[
        "text",
        gr.Dropdown(["gorilla-7b-hf-v1", "gorilla-7b-tf-v0", "gorilla-7b-th-v0"]),
    ],
    outputs=gr.Textbox(label="Response"),
    live=False,
    title="Gorilla Response App",
    description="Enter a query and select a Gorilla model to get a response.",
)

# Launch the interface (share=True creates a temporary public link).
if __name__ == "__main__":
    iface.launch(share=True)