gourav3017 committed on
Commit
375bef0
·
1 Parent(s): 9579f6a

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +11 -6
app.py CHANGED
@@ -9,10 +9,13 @@ from huggingface_hub import list_repo_files
9
  # Replace this with your actual Hugging Face repo ID
10
  REPO_ID = "PortPy-Project/PortPy_Dataset"
11
 
 
 
 
12
  @st.cache_data
13
  def get_patient_ids():
14
  # Extract disease site from patient ID prefix (e.g., Lung_Patient_1)
15
- file = hf_hub_download(REPO_ID, repo_type="dataset", filename="data_info.jsonl", local_dir="./temp")
16
  with open(file) as f:
17
  # data_info = json.load(f)
18
  data_info = [json.loads(line) for line in f]
@@ -34,7 +37,7 @@ def load_all_metadata(disease_site):
34
  structs = load_structure_metadata(patient_id)
35
  # Load beam metadata for the patient
36
  beams = load_beam_metadata(patient_id)
37
- planner_file = hf_hub_download(REPO_ID, repo_type="dataset", filename=f"data/{patient_id}/PlannerBeams.json", local_dir="./temp")
38
  with open(planner_file) as f:
39
  planner_data = json.load(f)
40
  planner_beam_ids = planner_data.get("IDs", [])
@@ -48,7 +51,7 @@ def load_all_metadata(disease_site):
48
 
49
  @st.cache_data
50
  def load_structure_metadata(patient_id):
51
- file = hf_hub_download(REPO_ID, repo_type="dataset", filename=f"data/{patient_id}/StructureSet_MetaData.json", local_dir="./temp")
52
  with open(file) as f:
53
  return json.load(f)
54
 
@@ -66,7 +69,7 @@ def load_beam_metadata(patient_id):
66
 
67
  beam_meta = []
68
  for path in beam_meta_paths:
69
- file = hf_hub_download(REPO_ID, repo_type="dataset", filename=path, local_dir="./temp")
70
  with open(file) as f:
71
  beam_meta.append(json.load(f))
72
  return beam_meta
@@ -152,7 +155,8 @@ def download_data(repo_id, patient_ids, beam_ids=None, planner_beam_ids=True, ma
152
  repo_id=repo_id,
153
  repo_type="dataset",
154
  filename=hf_path,
155
- local_dir=local_dir
 
156
  )
157
  downloaded_files.append(local_path)
158
  break
@@ -182,7 +186,8 @@ def download_data(repo_id, patient_ids, beam_ids=None, planner_beam_ids=True, ma
182
  repo_id=repo_id,
183
  repo_type="dataset",
184
  filename=hf_path,
185
- local_dir=local_dir
 
186
  )
187
  downloaded_files.append(local_path)
188
  break
 
9
  # Replace this with your actual Hugging Face repo ID
10
  REPO_ID = "PortPy-Project/PortPy_Dataset"
11
 
12
+ # Load from private repo using token
13
+ token = os.getenv("HF_TOKEN")
14
+
15
  @st.cache_data
16
  def get_patient_ids():
17
  # Extract disease site from patient ID prefix (e.g., Lung_Patient_1)
18
+ file = hf_hub_download(REPO_ID, repo_type="dataset", filename="data_info.jsonl", local_dir="./temp", use_auth_token=token)
19
  with open(file) as f:
20
  # data_info = json.load(f)
21
  data_info = [json.loads(line) for line in f]
 
37
  structs = load_structure_metadata(patient_id)
38
  # Load beam metadata for the patient
39
  beams = load_beam_metadata(patient_id)
40
+ planner_file = hf_hub_download(REPO_ID, repo_type="dataset", filename=f"data/{patient_id}/PlannerBeams.json", local_dir="./temp", use_auth_token=token)
41
  with open(planner_file) as f:
42
  planner_data = json.load(f)
43
  planner_beam_ids = planner_data.get("IDs", [])
 
51
 
52
  @st.cache_data
53
  def load_structure_metadata(patient_id):
54
+ file = hf_hub_download(REPO_ID, repo_type="dataset", filename=f"data/{patient_id}/StructureSet_MetaData.json", local_dir="./temp", use_auth_token=token)
55
  with open(file) as f:
56
  return json.load(f)
57
 
 
69
 
70
  beam_meta = []
71
  for path in beam_meta_paths:
72
+ file = hf_hub_download(REPO_ID, repo_type="dataset", filename=path, local_dir="./temp", use_auth_token=token)
73
  with open(file) as f:
74
  beam_meta.append(json.load(f))
75
  return beam_meta
 
155
  repo_id=repo_id,
156
  repo_type="dataset",
157
  filename=hf_path,
158
+ local_dir=local_dir,
159
+ use_auth_token=token
160
  )
161
  downloaded_files.append(local_path)
162
  break
 
186
  repo_id=repo_id,
187
  repo_type="dataset",
188
  filename=hf_path,
189
+ local_dir=local_dir,
190
+ use_auth_token=token
191
  )
192
  downloaded_files.append(local_path)
193
  break