dgarrett-synaptics committed on
Commit
f8c314d
·
verified ·
1 Parent(s): 8cafa32

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +18 -14
app.py CHANGED
@@ -25,20 +25,21 @@ def get_oauth_info(profile: gr.OAuthProfile | None, oauth_token: gr.OAuthToken |
25
  return print(f'{profile.username}: {org_names}')
26
 
27
 
28
- def compile_model(model_name, sram_size, tensor_size):
29
 
30
  if oauth_info['token'] is None:
31
  return "ERROR - please log into HuggingFace to continue"
32
 
33
  # Run the comparison
34
  out_dir = './tmp'
35
- model_loc = 'sram'
36
 
37
  # Run the model fitter
38
  results = sr100_model_compiler.sr100_model_compiler(
39
  model_file=model_name,
40
  output_dir=f"{out_dir}",
41
- model_loc=f"{model_loc}",
 
 
42
  )
43
  print(results)
44
 
@@ -60,27 +61,30 @@ def compile_model(model_name, sram_size, tensor_size):
60
 
61
  #def process_data(slider_value):
62
  # return slider_value * 2
63
-
 
64
 
65
  with gr.Blocks() as demo:
66
  gr.LoginButton()
67
  text1 = gr.Markdown("SR100 Model Compiler - Compile a tflite model to SR100")
68
-
69
 
70
- # Deploy the two slides
71
- sram_slider = gr.Slider(minimum=0, maximum=3, step=0.1, label="Set total SRAM size available in MB", value=3)
72
- tensor_slider = gr.Slider(minimum=0, maximum=3, step=0.1, label="Set the SRAM size for tensor calculations in MB", value=1.5)
73
- #output_text = gr.Textbox(label="Output")
 
 
74
 
75
- model_text = gr.Textbox(label='TFlite model', value='hello_world.tflite')
76
 
 
 
77
  compile_btn = gr.Button("Compile Model")
78
- compile_text = gr.Markdown("Waiting for model")
79
- user_text = gr.Markdown("")
80
 
81
  # Compute options
82
- compile_btn.click(compile_model, inputs=[model_text, sram_slider, tensor_slider], outputs=[compile_text])
83
- #my_slider.change(fn=process_data, inputs=my_slider, outputs=output_text)
84
  demo.load(get_oauth_info, inputs=None, outputs=user_text)
85
 
86
  if __name__ == "__main__":
 
25
  return print(f'{profile.username}: {org_names}')
26
 
27
 
28
+ def compile_model(model_name, sram_size, tensor_size, optimize, model_loc):
29
 
30
  if oauth_info['token'] is None:
31
  return "ERROR - please log into HuggingFace to continue"
32
 
33
  # Run the comparison
34
  out_dir = './tmp'
 
35
 
36
  # Run the model fitter
37
  results = sr100_model_compiler.sr100_model_compiler(
38
  model_file=model_name,
39
  output_dir=f"{out_dir}",
40
+ model_loc=model_loc,
41
+ optimize=optimize,
42
+ arena_cache_size=int(float(tensor_size)*1.0e6)
43
  )
44
  print(results)
45
 
 
61
 
62
  #def process_data(slider_value):
63
  # return slider_value * 2
64
+ def update_sliders(sram_slider_value):
65
+ return gr.update(maximum=sram_slider_value, value=sram_slider_value>>1)
66
 
67
  with gr.Blocks() as demo:
68
  gr.LoginButton()
69
  text1 = gr.Markdown("SR100 Model Compiler - Compile a tflite model to SR100")
70
+ user_text = gr.Markdown("")
71
 
72
+ # Setup model inputs
73
+ with gr.Row():
74
+ sram_slider = gr.Slider(minimum=0, maximum=3, step=0.1, label="Set total SRAM size available in MB", value=3)
75
+ tensor_slider = gr.Slider(minimum=0, maximum=3, step=0.1, label="Set the SRAM size for tensor calculations in MB", value=1.5)
76
+ optimize = gr.Radio(choices=["Performance", "Size"], value='Performance', label='Performance model')
77
+ model_loc = gr.Radio(choices=["sram", "flash"], value="sram", label='Model weights target')
78
 
 
79
 
80
+ # Setup model compile
81
+ model_text = gr.Textbox(label='TFLite model', value='hello_world.tflite')
82
  compile_btn = gr.Button("Compile Model")
83
+ compile_text = gr.Markdown("Waiting for model results")
 
84
 
85
  # Compute options
86
+ compile_btn.click(compile_model, inputs=[model_text, sram_slider, tensor_slider, optimize, model_loc], outputs=[compile_text])
87
+ sram_slider.change(fn=update_sliders, inputs=sram_slider, outputs=tensor_slider)
88
  demo.load(get_oauth_info, inputs=None, outputs=user_text)
89
 
90
  if __name__ == "__main__":