{
  "type": "ONNXModel",
  "config": {
    "model_path": "/content/Olive/examples/whisper/models/conversion-transformers_optimization-onnx_dynamic_quantization-insert_beam_search-prepost/whisper_cpu_int8_cpu-cpu_model.onnx",
    "onnx_file_name": null,
    "inference_settings": null,
    "use_ort_extensions": true,
    "hf_config": {
      "model_name": "openai/whisper-base.en",
      "task": null,
      "feature": null,
      "model_class": "WhisperForConditionalGeneration",
      "components": [
        {
          "name": "encoder_decoder_init",
          "io_config": "get_encdec_io_config",
          "component_func": "get_encoder_decoder_init",
          "dummy_inputs_func": "encoder_decoder_init_dummy_inputs"
        },
        {
          "name": "decoder",
          "io_config": "get_dec_io_config",
          "component_func": "get_decoder",
          "dummy_inputs_func": "decoder_dummy_inputs"
        }
      ],
      "dataset": null,
      "model_loading_args": null
    }
  }
}