"""Gradio app that exposes an OpenAI-embeddings-shaped API backed by e5-mistral.

The request/response payloads mirror the OpenAI ``/v1/embeddings`` schema so
existing OpenAI-compatible clients can use this space as a drop-in backend.
"""

import gradio as gr
from gradio_client import Client

title = """# 🙋🏻‍♂️Welcome to tonic's openai⚒️connector for 🐣e5-mistral🛌🏻
this⚒️connector looks like openai embeddings but uses 🐣e5-mistral🛌🏻 - so you can use it as a drop in replacement for the open ai api. Both the inputs and outputs exactly match what is⚒️expected from openai's api so anything already⚒️compatible with that api is now compatible with 🐣e5-mistral🛌🏻
"""

# NOTE(review): this pins a specific replica ("w3v1e"); replicas rotate, so the
# stable space URL ("https://tonic-e5.hf.space/") is likely more durable — confirm.
client = Client("https://tonic-e5.hf.space/--replicas/w3v1e/")

# MTEB task names accepted by the remote /compute_embeddings endpoint.
EMBEDDING_TASKS = [
    "ArguAna", "ClimateFEVER", "DBPedia", "FEVER", "FiQA2018", "HotpotQA",
    "MSMARCO", "NFCorpus", "NQ", "QuoraRetrieval", "SCIDOCS", "SciFact",
    "Touche2020", "TRECCOVID",
]


def get_embeddings(task, input_text):
    """Call the remote ``/compute_embeddings`` endpoint.

    Args:
        task: MTEB task name selecting the instruction prefix on the backend.
        input_text: Text (or array of texts) to embed.

    Returns:
        The raw prediction result on success, or the error message as a
        ``str`` on failure (callers must check for this sentinel).
    """
    try:
        return client.predict(
            task,
            input_text,
            api_name="/compute_embeddings",
        )
    except Exception as e:
        # Best-effort: surface the failure text to the caller rather than
        # crashing the UI callback.
        return str(e)


def format_response(embeddings, model):
    """Wrap *embeddings* in an OpenAI ``/v1/embeddings``-style envelope.

    Args:
        embeddings: The embedding vector(s) returned by the backend.
        model: Model/task identifier echoed back in the response.

    Returns:
        A dict matching the OpenAI embeddings response schema.
    """
    return {
        "data": [
            {
                "embedding": embeddings,
                "index": 0,
                "object": "embedding",
            }
        ],
        "model": model,
        "object": "list",
        # NOTE(review): token counts are hard-coded placeholders — the backend
        # does not report usage; replace with real counts if it ever does.
        "usage": {
            "prompt_tokens": 17,
            "total_tokens": 17,
        },
    }


def generate_embeddings(input_text, model, encoding_format, user):
    """Gradio callback: embed *input_text* and return an OpenAI-style payload.

    Args:
        input_text: Text (or array of texts) to embed.
        model: Selected MTEB task name (plays the role of OpenAI's ``model``).
        encoding_format: Accepted for API parity; currently unused.
        user: Optional user identifier; accepted for API parity, unused.

    Returns:
        An OpenAI embeddings response dict, or an OpenAI-style error object
        when the backend call failed.
    """
    embeddings = get_embeddings(model, input_text)
    if isinstance(embeddings, str):
        # get_embeddings returns the failure message as a plain string; report
        # it as an error instead of embedding it in the data payload.
        return {"error": {"message": embeddings, "type": "api_error"}}
    return format_response(embeddings, model)


with gr.Blocks() as app:
    gr.Markdown(title)
    with gr.Row():
        input_text = gr.Textbox(
            label="Input Text",
            placeholder="Enter text or array of texts",
        )
        # Bug fix: the previous default ("text-embedding-ada-002") was not one
        # of the dropdown choices; default to the first valid task instead.
        model = gr.Dropdown(
            label="Model",
            choices=EMBEDDING_TASKS,
            value=EMBEDDING_TASKS[0],
        )
        encoding_format = gr.Radio(
            label="Encoding Format",
            choices=["float", "base64"],
            value="float",
        )
        user = gr.Textbox(
            label="User",
            placeholder="Enter user identifier (optional)",
        )
    submit_button = gr.Button("Generate Embeddings")
    output = gr.JSON(label="Embeddings Output")

    submit_button.click(
        fn=generate_embeddings,
        inputs=[input_text, model, encoding_format, user],
        outputs=output,
    )

if __name__ == "__main__":
    # Guarded so importing this module (e.g. for tests) does not start a server.
    app.launch()