# NOTE(review): the three lines below were HuggingFace Spaces page metadata
# ("Spaces: / Sleeping / Sleeping") captured by the scrape — not program code.
| import gradio as gr | |
| from backend import BackEnd | |
| import argparse | |
| import configparser | |
class Demo:
    """Gradio chat front-end for a retrieval-augmented BackEnd.

    Shows a conversation, and for the latest query: the answer, the
    system's rationale, the cited source, and the retrieved documents.
    The UI language (French or English) is taken from the config.
    """

    def __init__(self, config):
        """Store the config, build the backend, and normalise the language.

        Parameters
        ----------
        config : configparser.ConfigParser
            Must provide config['General']['language'] (e.g. 'fr', 'en',
            'French', ...); only the first two lowercased letters are kept.
        """
        self.config = config
        self.backend = BackEnd(self.config)
        # Normalise to a two-letter code so 'French'/'english' etc. work.
        self.lang = self.config['General']['language'].lower()[:2]

    def process_query(self, history, query):
        """Run one user query through the backend.

        Parameters
        ----------
        history : list[tuple[str, str]]
            Chatbot history as (user, bot) pairs; appended to in place.
        query : str
            The user's input text.

        Returns
        -------
        tuple
            (history, "", response, rationale, source, documents) —
            matching the Gradio outputs list; the empty string clears
            the input textbox.
        """
        res, context = self.backend.process_query(query)
        documents = '\n\n'.join(context)
        if self.lang == 'fr':
            response = res['réponse']
            rationale = res['justification']
        else:
            # BUG FIX: the original read the undefined global `lang` here
            # (`elif lang == 'en':`), raising NameError for English, and
            # left `response`/`rationale` unbound for any other language.
            # Unknown languages now fall back to the English keys.
            response = res['answer']
            rationale = res['rationale']
        source = res['source']
        history.append((query, response))
        return history, "", response, rationale, source, documents

    def run_demo(self):
        """Build the (language-specific) Gradio UI and launch it.

        Blocks until the Gradio server is stopped.
        """
        if self.lang == 'fr':
            with gr.Blocks(theme=gr.themes.Glass()) as demo:
                gr.Image(value='crim_logo.png')
                gr.Markdown("## Démonstration d'IA générative")
                chatbot = gr.Chatbot(label="Conversation", height=400)
                gr.Markdown("Saisissez une requête ci-dessous et voyez la réponse et le raisonnement du système.")
                with gr.Row():
                    query_input = gr.Textbox(
                        show_label=False,
                        placeholder="Tapez quelque chose ...",
                        lines=1
                    )
                    send_btn = gr.Button("Envoyer", scale=0)
                gr.Markdown("### Dernière réponse")
                with gr.Row():
                    answer_output = gr.Textbox(label="Réponse", lines=3, interactive=False)
                with gr.Row():
                    reasoning_output = gr.Textbox(label="Raisonnement du système", lines=5, interactive=False)
                with gr.Row():
                    source_output = gr.Textbox(label="Source", interactive=False)
                with gr.Accordion("Documents récupérés", open=False):
                    docs_output = gr.Textbox(label="Documents justificatifs", interactive=False, lines=30)
                inputs = [chatbot, query_input]
                outputs = [chatbot, query_input, answer_output, reasoning_output, source_output, docs_output]
                # Both Enter in the textbox and the button trigger a query.
                query_input.submit(fn=self.process_query, inputs=inputs, outputs=outputs)
                send_btn.click(fn=self.process_query, inputs=inputs, outputs=outputs)
        else:
            # English UI — also the fallback for unknown language codes
            # (the original used `elif self.lang == 'en':`, which left
            # `demo` unbound and crashed at launch for anything else).
            with gr.Blocks(theme=gr.themes.Glass()) as demo:
                gr.Image(value='crim_logo.png')
                gr.Markdown("## Generative AI Chat Demo with Structured Outputs")
                chatbot = gr.Chatbot(label="Conversation", height=400)
                with gr.Row():
                    query_input = gr.Textbox(
                        show_label=False,
                        placeholder="Type your query here and press Enter...",
                        lines=1
                    )
                    send_btn = gr.Button("Send", scale=0)
                gr.Markdown("### Latest Response Details")
                with gr.Row():
                    answer_output = gr.Textbox(label="Answer", interactive=False)
                with gr.Row():
                    reasoning_output = gr.Textbox(label="System Reasoning", interactive=False)
                with gr.Row():
                    source_output = gr.Textbox(label="Source", interactive=False)
                with gr.Accordion("Retrieved Documents", open=False):
                    docs_output = gr.Textbox(label="Supporting Documents", interactive=False, lines=30)
                inputs = [chatbot, query_input]
                outputs = [chatbot, query_input, answer_output, reasoning_output, source_output, docs_output]
                query_input.submit(fn=self.process_query, inputs=inputs, outputs=outputs)
                send_btn.click(fn=self.process_query, inputs=inputs, outputs=outputs)
        demo.launch()
def main():
    """Entry point: parse CLI options, load the .ini config, launch the demo.

    Restores the previously commented-out --config_file option; it now
    defaults to 'config.ini', so running with no arguments behaves
    exactly as before.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--config_file', type=str, default='config.ini',
                        help='File containing the configuration for the backend (in .ini format)')
    args = parser.parse_args()
    config = configparser.ConfigParser()
    # ConfigParser.read silently ignores a missing file; BackEnd will then
    # see an empty config — NOTE(review): consider failing fast here.
    config.read(args.config_file)
    demo = Demo(config)
    demo.run_demo()


if __name__ == "__main__":
    main()