from _vendor import vendor_llm_endpoint, vendor_llm_model

import gradio as gr

# ====================================================================================================
# Global look & feel: neutral theme, CSS that hides Space chrome / scrollbars /
# footer, and shared input attributes (no autocorrect/spellcheck) for all boxes.

theme = gr.themes.Base(
    primary_hue="neutral",
    secondary_hue="neutral",
    neutral_hue="neutral",
    text_size="lg",
    font=[gr.themes.GoogleFont("Inter")],
    font_mono=[gr.themes.GoogleFont("Ubuntu Mono")],
)

head = """
"""

css = """
#huggingface-space-header { display: none !important; }
* { -ms-overflow-style: none; scrollbar-width: none; }
*::-webkit-scrollbar { display: none; }
footer { display: none !important; }
textarea { padding-top: 5px !important; padding-bottom: 6px !important; }
.gr_XX_B p { font-size: 20px; font-weight: 600; text-transform: uppercase; margin-top: 8px !important; }
"""

offspellcheck = gr.InputHTMLAttributes(autocorrect="off", spellcheck=False)

# ====================================================================================================


def fn_process_multitextbox(gr_01_A):
    """Extract the trimmed text from a MultimodalTextbox payload.

    Returns a pair ``("", text)``: the empty string clears the input box and
    the stripped text is echoed into the question Markdown component.
    """
    gr_01_B = gr_01_A["text"].strip()
    return "", gr_01_B


def _stream_llm(prompt):
    """Stream a chat completion for *prompt*, yielding the accumulated text.

    Yields the full response-so-far on every chunk so a Gradio Markdown
    output bound to this generator renders a live "typing" effect.
    """
    outputtext = ""
    llm_res_stream = vendor_llm_endpoint.chat.completions.create(
        model=vendor_llm_model,
        messages=[{"role": "user", "content": prompt}],
        stream=True,
    )
    for event in llm_res_stream:
        # The final chunk of an OpenAI-style stream carries delta.content=None;
        # guard with `or ""` so we never concatenate None into the text.
        outputtext += event.choices[0].delta.content or ""
        yield outputtext


def fn_01(inputtext):
    """Nyx WIKI: stream an encyclopedic answer to the user's question."""
    yield from _stream_llm(f"""\
Your name is "Nyx WIKI", an AI assistant based on NYX models, developed by One Level Studio.
You have a clear, formal, neutral, and encyclopedic tone. You provide accurate and well-structured information.
Do not give any link/url in response. Do not ask user anything.
This is user's input/question: "{inputtext}"
""")


def fn_02(inputtext):
    """Nyx EN2VI: stream an English -> Vietnamese translation of the input."""
    yield from _stream_llm(f"""\
Your name is "Nyx EN2VI", an AI translator based on NYX models, developed by One Level Studio.
Do not give any link/url in response. Do not answer or ask user anything.
Your only mission is translating the pure text.
Below is the pure text that user needs to translate. Translate it to Vietnamese (tiếng Việt): "{inputtext}"
""")


def fn_03(inputtext):
    """Nyx VI2EN: stream a Vietnamese -> English translation of the input."""
    yield from _stream_llm(f"""\
Your name is "Nyx VI2EN", an AI translator based on NYX models, developed by One Level Studio.
Do not give any link/url in response. Do not answer or ask user anything.
Your only mission is translating the pure text.
Below is the pure text that user needs to translate. Translate it to English: "{inputtext}"
""")


# ====================================================================================================


def _build_chat_app(placeholder, stream_fn, footer_html=None):
    """Build one chat tab: input box, echoed question, streamed answer.

    Parameters
    ----------
    placeholder : str
        Placeholder text shown in the input box.
    stream_fn : callable
        Generator function (one of fn_01/fn_02/fn_03) that streams the answer.
    footer_html : str | None
        Optional HTML rendered below the answer (e.g. a disclaimer).
    """
    with gr.Blocks() as app:
        box = gr.MultimodalTextbox(
            sources=[],
            placeholder=placeholder,
            max_plain_text_length=99999,
            html_attributes=offspellcheck,
            autofocus=True,
            show_label=False,
            container=False,
        )
        echo = gr.Markdown(container=False, elem_classes="gr_XX_B")
        answer = gr.Markdown(container=False)
        if footer_html is not None:
            gr.HTML(footer_html, container=False)
        # On submit: clear the box and echo the question, then stream the answer.
        gr.on(
            triggers=box.submit,
            fn=fn_process_multitextbox,
            inputs=[box],
            outputs=[box, echo],
            show_progress="hidden",
        ).then(
            fn=stream_fn,
            inputs=[echo],
            outputs=[answer],
            show_progress="full",
        )
    return app


app_001 = _build_chat_app(
    "Ask Nyx anything",
    fn_01,
    footer_html="""
Nyx can make mistakes, double-check information.
""",
)

app_002 = _build_chat_app("Translate English to Vietnamese", fn_02)

app_003 = _build_chat_app("Dịch tiếng Việt sang tiếng Anh", fn_03)

with gr.Blocks() as app_about:
    gr.HTML(
        """

NYX, developed by One Level Studio.

onelevel.studio
""",
        container=False,
    )

# ====================================================================================================

with gr.Blocks(title="NYX") as demo:
    # Three-column layout: empty gutters left/right, tabbed apps in the middle.
    with gr.Row():
        with gr.Column():
            gr.Markdown()
        with gr.Column():
            with gr.Tab("Nyx WIKI"):
                app_001.render()
            with gr.Tab("Nyx EN2VI"):
                app_002.render()
            with gr.Tab("Nyx VI2EN"):
                app_003.render()
            with gr.Tab("About"):
                app_about.render()
        with gr.Column():
            gr.Markdown()

demo.launch(head=head, css=css, theme=theme)