Borzyszkowski committed on
Commit
0f2b26a
·
1 Parent(s): daf4234

cleanup cache

Browse files
Files changed (1) hide show
  1. app.py +4 -1
app.py CHANGED
@@ -2,6 +2,7 @@
2
 
3
  import gradio as gr
4
  import os
 
5
  import torch
6
 
7
  from huggingface_hub import hf_hub_download
@@ -18,7 +19,7 @@ def download_model(cfg):
18
  repo_id=cfg.repo_id,
19
  filename=cfg.model_name,
20
  token=HF_TOKEN,
21
- cache_dir="./model-cache"
22
  )
23
  return model_path
24
 
@@ -66,6 +67,7 @@ if __name__ == '__main__':
66
  'model_type': 'transformer',
67
  'repo_id': "Borzyszkowski/AlpineLLM-model",
68
  'model_name': "best_model",
 
69
  }
70
  cfg = Config(cfg)
71
 
@@ -81,6 +83,7 @@ if __name__ == '__main__':
81
  hyperparam_cfg = Config(hyperparam_cfg)
82
 
83
  # Ensure model weights are available
 
84
  cfg.load_weights_path = download_model(cfg)
85
 
86
  # Start the application
 
2
 
3
  import gradio as gr
4
  import os
5
+ import shutil
6
  import torch
7
 
8
  from huggingface_hub import hf_hub_download
 
19
  repo_id=cfg.repo_id,
20
  filename=cfg.model_name,
21
  token=HF_TOKEN,
22
+ cache_dir=cfg.cache_dir
23
  )
24
  return model_path
25
 
 
67
  'model_type': 'transformer',
68
  'repo_id': "Borzyszkowski/AlpineLLM-model",
69
  'model_name': "best_model",
70
+ 'cache_dir': "./model-cache",
71
  }
72
  cfg = Config(cfg)
73
 
 
83
  hyperparam_cfg = Config(hyperparam_cfg)
84
 
85
  # Ensure model weights are available
86
+ shutil.rmtree(cfg.cache_dir, ignore_errors=True)
87
  cfg.load_weights_path = download_model(cfg)
88
 
89
  # Start the application