Anuj-Panthri commited on
Commit
25ef662
·
1 Parent(s): 57f9c25

added a few things

Browse files
Files changed (6) hide show
  1. .gitignore +1 -0
  2. Dockerfile +1 -1
  3. backup.py +36 -0
  4. on_startup.sh +6 -1
  5. requirements.txt +4 -1
  6. restore.py +12 -0
.gitignore ADDED
@@ -0,0 +1 @@
 
 
1
+ .env
Dockerfile CHANGED
@@ -112,4 +112,4 @@ ENV PYTHONUNBUFFERED=1 \
112
  SYSTEM=spaces \
113
  SHELL=/bin/bash
114
 
115
- CMD ["./start_server.sh"]
 
112
  SYSTEM=spaces \
113
  SHELL=/bin/bash
114
 
115
+ CMD ["./on_startup.sh"]
backup.py ADDED
@@ -0,0 +1,36 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
from dotenv import load_dotenv
load_dotenv()  # pull HF_TOKEN etc. from .env before huggingface_hub reads the env

import os
import time

from datasets import load_dataset  # NOTE(review): unused here — confirm before removing
from huggingface_hub import CommitScheduler
from huggingface_hub import HfApi


# Hugging Face dataset repo used as persistent storage for the JupyterLab data dir.
dataset_name = "Anuj-Panthri/JupyterLab_Storage"
# Persistent data directory. Expand "~" explicitly: HfApi/CommitScheduler treat
# the path literally and would otherwise look for a directory named "~".
work_dir = os.path.expanduser("~/../../data/")
# Upload into the root of the dataset repo.
dataset_save_path = "."


def push_to_hub():
    """One-shot backup: upload the whole data directory to the dataset repo root."""
    api = HfApi()
    api.upload_folder(
        repo_id=dataset_name,
        repo_type="dataset",
        folder_path=work_dir,
        path_in_repo=dataset_save_path,
    )


def commit_scheduler(minutes=10):
    """Start a background CommitScheduler that pushes ``work_dir`` every *minutes*.

    Returns the scheduler so the caller can keep a reference / stop it.
    """
    scheduler = CommitScheduler(
        repo_id=dataset_name,
        repo_type="dataset",
        folder_path=work_dir,
        path_in_repo=dataset_save_path,
        every=minutes,
    )
    return scheduler


if __name__ == "__main__":
    scheduler = commit_scheduler()
    # CommitScheduler runs in a daemon thread; the original script exited
    # immediately, killing the scheduler before the first commit. Keep the
    # process alive (this script is launched with nohup from on_startup.sh).
    while True:
        time.sleep(3600)
on_startup.sh CHANGED
@@ -2,4 +2,9 @@
2
  # Write some commands here that will run on root user before startup.
3
  # For example, to clone transformers and install it in dev mode:
4
  # git clone https://github.com/huggingface/transformers.git
5
- # cd transformers && pip install -e ".[dev]"
 
 
 
 
 
 
2
# Write some commands here that will run on root user before startup.
# For example, to clone transformers and install it in dev mode:
# git clone https://github.com/huggingface/transformers.git
# cd transformers && pip install -e ".[dev]"

# Authenticate so restore.py / backup.py can reach the private dataset repo.
huggingface-cli login --token ${HF_TOKEN}
# Restore previous state first (blocking), then start the periodic backup.
python3 restore.py
# Fixes: "noup" was a typo for "nohup", and without "&" the backup loop
# would block forever and start_server.sh would never run.
nohup python3 backup.py &
# Use an explicit ./ — start_server.sh lives in the workdir, not on PATH
# (the Dockerfile previously ran it as CMD ["./start_server.sh"]).
./start_server.sh
requirements.txt CHANGED
@@ -1,4 +1,7 @@
1
  jupyterlab==3.6.1
2
  jupyter-server==2.3.0
3
  tornado==6.2
4
- ipywidgets
 
 
 
 
1
  jupyterlab==3.6.1
2
  jupyter-server==2.3.0
3
  tornado==6.2
4
+ ipywidgets
5
+ huggingface_hub
6
+ datasets
7
+ python-dotenv
restore.py ADDED
@@ -0,0 +1,12 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import os

from huggingface_hub import snapshot_download

# Dataset repo that holds the backed-up JupyterLab data directory.
dataset_name = "Anuj-Panthri/JupyterLab_Storage"
# Expand "~" explicitly — snapshot_download treats local_dir literally and
# would otherwise create a directory named "~" instead of using $HOME.
work_dir = os.path.expanduser("~/../../data/")
dataset_save_path = "."  # NOTE(review): unused here; kept for symmetry with backup.py

# Download the full dataset snapshot into the data dir as real files
# (no symlinks into the HF cache) so JupyterLab edits persist in place.
snapshot_download(
    repo_id=dataset_name,
    repo_type="dataset",
    local_dir=work_dir,
    local_dir_use_symlinks=False,
)