| import gradio as gr |
| from neon_llm_chatgpt.chatgpt import ChatGPT |
| from neon_llm_palm2.palm2 import Palm2 |
| from neon_llm_gemini.gemini import Gemini |
| from neon_llm_claude.claude import Claude |
| from utils import convert_history, PersonaConverter |
| import os |
|
|
|
|
|
|
# Shared persona defaults reused by every backend configuration below
# and by the UI's initial textbox values.
role = ""
biography = ""

# --- OpenAI (ChatGPT) backend ---
# os.environ[...] (not .get) fails fast at startup if the key is absent.
key = os.environ['OPENAI_API_KEY']
chatgpt = ChatGPT({
    "key": key,
    "model": "gpt-3.5-turbo",
    "role": role,
    "context_depth": 3,
    "max_tokens": 256,
})
|
|
# --- Google Vertex (PaLM 2) backend ---
# The service-account JSON is delivered via an env var, but the Vertex
# SDK wants a file on disk, so persist it locally first.
# NOTE(review): this writes credentials into the working directory —
# confirm that is acceptable for the deployment environment.
key_google = os.environ['GOOGLE_API_KEY']
key_path = "./vertex.json"
with open(key_path, "w") as json_file:
    json_file.write(key_google)

palm2 = Palm2({
    "key_path": key_path,
    "role": role,
    "context_depth": 3,
    "max_tokens": 256,
})
|
|
# --- Google Gemini backend ---
# Reuses the Vertex credentials file written above; note the larger
# completion budget (1024 tokens) compared with the other backends.
gemini = Gemini({
    "model": "gemini-pro",
    "key_path": key_path,
    "role": role,
    "context_depth": 3,
    "max_tokens": 1024,
})
|
|
# --- Anthropic (Claude) backend ---
# NOTE(review): the OpenAI key is also passed as "openai_key" — presumably
# the Claude wrapper needs it internally; verify against neon_llm_claude.
key_anthropic = os.environ['ANTHROPIC_API_KEY']
claude = Claude({
    "key": key_anthropic,
    "openai_key": key,
    "model": "claude-2",
    "role": role,
    "context_depth": 3,
    "max_tokens": 256,
})
|
|
# Registry mapping the UI's model names to the instantiated backends;
# the key list feeds the "Model" dropdown.
model_choices = {
    "openai": chatgpt,
    "palm2": palm2,
    "gemini": gemini,
    "anthropic": claude,
}
model_choices_list = list(model_choices)

# Persona presets and historical figures offered in the other dropdowns.
personaConverter = PersonaConverter()
preset_choices_list = list(personaConverter.personas)
figures_choices_list = list(personaConverter.historical_figures)
|
|
def ask(message, history, persona, model_name, preset, biography, figure, imaginary, short):
    """Answer one chat turn with the backend selected in the UI.

    Converts the Gradio chat history into the model's format, builds a
    persona description from the dropdown/textbox settings via
    ``personaConverter``, and forwards everything to the chosen model.
    Returns the model's reply text.
    """
    selected = model_choices[model_name]
    description = personaConverter(
        name=preset,
        figure=figure,
        description=persona,
        biography=biography,
        imaginary=imaginary,
        short=short,
    )
    return selected.ask(
        message,
        convert_history(history),
        persona={"description": description},
    )
|
|
|
|
# Chat UI. The additional widgets are passed to ask() positionally, so
# their order here must match ask()'s parameters after (message, history).
persona_box = gr.Textbox(role, label="Persona")
model_dd = gr.Dropdown(choices=model_choices_list, value=model_choices_list[0], label="Model")
preset_dd = gr.Dropdown(choices=preset_choices_list, value=preset_choices_list[0], label="Preset")
biography_box = gr.Textbox(biography, label="Biography")
figure_dd = gr.Dropdown(choices=figures_choices_list, value=figures_choices_list[0], label="Historical Figures")
imaginary_cb = gr.Checkbox(value=False, label="Imaginary")
short_cb = gr.Checkbox(value=True, label="Short response")

demo = gr.ChatInterface(
    ask,
    additional_inputs=[
        persona_box,
        model_dd,
        preset_dd,
        biography_box,
        figure_dd,
        imaginary_cb,
        short_cb,
    ],
)
|
|
if __name__ == "__main__":
    # Enable Gradio's request queue, then serve the interface.
    app = demo.queue()
    app.launch()