"""Entry point that serves a summarization model through a Gradio UI.

Reads a single JSON configuration file, loads an OpenAI language model
with the configured options, and launches the Gradio summarization app.
"""

import argparse
import json

from src.gradio_app import run_summarization_model_gradio
from src.model_loading import load_open_ai_llm

# Parser stays at module level so `--help` and external CLI inspection work;
# parsing itself only happens under the __main__ guard.
parser = argparse.ArgumentParser(description="Defines a configuration")
parser.add_argument(
    "--configuration_file",
    "-cf",
    type=str,
    default="configuration/deployment.json",
    help="Path to the configuration file. The configuration can be used for a single source of configuration.",
)


def main() -> None:
    """Load the configuration, build the LLM, and run the Gradio app.

    Raises:
        FileNotFoundError: if the configuration file does not exist.
        json.JSONDecodeError: if the configuration file is not valid JSON.
    """
    args = parser.parse_args()

    # The JSON file is the single source of configuration for the deployment.
    # Explicit UTF-8 avoids platform-dependent default encodings.
    with open(args.configuration_file, "r", encoding="utf-8") as cfg_file:
        cfg_kwargs = json.load(cfg_file)

    # Fall back to map-reduce summarization when the config omits the key.
    summarization_kwargs = cfg_kwargs.get(
        "summarization_kwargs", {"chain_type": "map_reduce"}
    )

    # Load the language model with any model-specific options from the config.
    llm = load_open_ai_llm(
        model_loading_kwargs=cfg_kwargs.get("language_model_kwargs", {})
    )

    # Run the summarization model in Gradio; defaults: no public share link,
    # serve locally.
    run_summarization_model_gradio(
        llm=llm,
        share_gradio_via_link=cfg_kwargs.get("share_gradio_via_link", False),
        summarization_kwargs=summarization_kwargs,
        run_local=cfg_kwargs.get("run_local", True),
    )


if __name__ == "__main__":
    main()