gourav3017 committed on
Commit
4a31a2e
·
1 Parent(s): 375bef0

Update app.py for new hub version

Browse files
Files changed (1) hide show
  1. app.py +6 -6
app.py CHANGED
@@ -15,7 +15,7 @@ token = os.getenv("HF_TOKEN")
15
  @st.cache_data
16
  def get_patient_ids():
17
  # Extract disease site from patient ID prefix (e.g., Lung_Patient_1)
18
- file = hf_hub_download(REPO_ID, repo_type="dataset", filename="data_info.jsonl", local_dir="./temp", use_auth_token=token)
19
  with open(file) as f:
20
  # data_info = json.load(f)
21
  data_info = [json.loads(line) for line in f]
@@ -37,7 +37,7 @@ def load_all_metadata(disease_site):
37
  structs = load_structure_metadata(patient_id)
38
  # Load beam metadata for the patient
39
  beams = load_beam_metadata(patient_id)
40
- planner_file = hf_hub_download(REPO_ID, repo_type="dataset", filename=f"data/{patient_id}/PlannerBeams.json", local_dir="./temp", use_auth_token=token)
41
  with open(planner_file) as f:
42
  planner_data = json.load(f)
43
  planner_beam_ids = planner_data.get("IDs", [])
@@ -51,7 +51,7 @@ def load_all_metadata(disease_site):
51
 
52
  @st.cache_data
53
  def load_structure_metadata(patient_id):
54
- file = hf_hub_download(REPO_ID, repo_type="dataset", filename=f"data/{patient_id}/StructureSet_MetaData.json", local_dir="./temp", use_auth_token=token)
55
  with open(file) as f:
56
  return json.load(f)
57
 
@@ -69,7 +69,7 @@ def load_beam_metadata(patient_id):
69
 
70
  beam_meta = []
71
  for path in beam_meta_paths:
72
- file = hf_hub_download(REPO_ID, repo_type="dataset", filename=path, local_dir="./temp", use_auth_token=token)
73
  with open(file) as f:
74
  beam_meta.append(json.load(f))
75
  return beam_meta
@@ -156,7 +156,7 @@ def download_data(repo_id, patient_ids, beam_ids=None, planner_beam_ids=True, ma
156
  repo_type="dataset",
157
  filename=hf_path,
158
  local_dir=local_dir,
159
- use_auth_token=token
160
  )
161
  downloaded_files.append(local_path)
162
  break
@@ -187,7 +187,7 @@ def download_data(repo_id, patient_ids, beam_ids=None, planner_beam_ids=True, ma
187
  repo_type="dataset",
188
  filename=hf_path,
189
  local_dir=local_dir,
190
- use_auth_token=token
191
  )
192
  downloaded_files.append(local_path)
193
  break
 
15
  @st.cache_data
16
  def get_patient_ids():
17
  # Extract disease site from patient ID prefix (e.g., Lung_Patient_1)
18
+ file = hf_hub_download(REPO_ID, repo_type="dataset", filename="data_info.jsonl", local_dir="./temp", token=token)
19
  with open(file) as f:
20
  # data_info = json.load(f)
21
  data_info = [json.loads(line) for line in f]
 
37
  structs = load_structure_metadata(patient_id)
38
  # Load beam metadata for the patient
39
  beams = load_beam_metadata(patient_id)
40
+ planner_file = hf_hub_download(REPO_ID, repo_type="dataset", filename=f"data/{patient_id}/PlannerBeams.json", local_dir="./temp", token=token)
41
  with open(planner_file) as f:
42
  planner_data = json.load(f)
43
  planner_beam_ids = planner_data.get("IDs", [])
 
51
 
52
  @st.cache_data
53
  def load_structure_metadata(patient_id):
54
+ file = hf_hub_download(REPO_ID, repo_type="dataset", filename=f"data/{patient_id}/StructureSet_MetaData.json", local_dir="./temp", token=token)
55
  with open(file) as f:
56
  return json.load(f)
57
 
 
69
 
70
  beam_meta = []
71
  for path in beam_meta_paths:
72
+ file = hf_hub_download(REPO_ID, repo_type="dataset", filename=path, local_dir="./temp", token=token)
73
  with open(file) as f:
74
  beam_meta.append(json.load(f))
75
  return beam_meta
 
156
  repo_type="dataset",
157
  filename=hf_path,
158
  local_dir=local_dir,
159
+ token=token
160
  )
161
  downloaded_files.append(local_path)
162
  break
 
187
  repo_type="dataset",
188
  filename=hf_path,
189
  local_dir=local_dir,
190
+ token=token
191
  )
192
  downloaded_files.append(local_path)
193
  break