Spaces:
Sleeping
Sleeping
Commit
·
f749037
1
Parent(s):
983c3fb
Add weights to git lfs
Browse files- .gitattributes +1 -0
- .gitignore +0 -1
- models/weights.h5 +3 -0
- notebooks/train.ipynb +1 -3
- src/utils/upload_dataset.py +36 -0
.gitattributes
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
models/weights.h5 filter=lfs diff=lfs merge=lfs -text
|
.gitignore
CHANGED
|
@@ -3,6 +3,5 @@ __pycache__/
|
|
| 3 |
.DS_Store
|
| 4 |
**/*.png
|
| 5 |
**/*.npy
|
| 6 |
-
**/*.h5
|
| 7 |
**/*.nii.gz
|
| 8 |
/data/raw
|
|
|
|
| 3 |
.DS_Store
|
| 4 |
**/*.png
|
| 5 |
**/*.npy
|
|
|
|
| 6 |
**/*.nii.gz
|
| 7 |
/data/raw
|
models/weights.h5
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:069f52eac88d5a6fe114b27ce6cbd9868c7432d54ef63bc6a502355058bd2641
|
| 3 |
+
size 93275616
|
notebooks/train.ipynb
CHANGED
|
@@ -8,7 +8,6 @@
|
|
| 8 |
"source": [
|
| 9 |
"import os\n",
|
| 10 |
"from skimage.transform import resize\n",
|
| 11 |
-
"from skimage.io import imsave\n",
|
| 12 |
"import numpy as np\n",
|
| 13 |
"from skimage.segmentation import mark_boundaries\n",
|
| 14 |
"from keras.models import Model\n",
|
|
@@ -17,9 +16,8 @@
|
|
| 17 |
"from keras.callbacks import ModelCheckpoint\n",
|
| 18 |
"from keras import backend as K\n",
|
| 19 |
"from skimage.exposure import rescale_intensity\n",
|
| 20 |
-
"from keras.callbacks import History\n",
|
| 21 |
"from skimage import io\n",
|
| 22 |
-
"from data import load_train_data, load_test_data"
|
| 23 |
]
|
| 24 |
},
|
| 25 |
{
|
|
|
|
| 8 |
"source": [
|
| 9 |
"import os\n",
|
| 10 |
"from skimage.transform import resize\n",
|
|
|
|
| 11 |
"import numpy as np\n",
|
| 12 |
"from skimage.segmentation import mark_boundaries\n",
|
| 13 |
"from keras.models import Model\n",
|
|
|
|
| 16 |
"from keras.callbacks import ModelCheckpoint\n",
|
| 17 |
"from keras import backend as K\n",
|
| 18 |
"from skimage.exposure import rescale_intensity\n",
|
|
|
|
| 19 |
"from skimage import io\n",
|
| 20 |
+
"from ..src.data.data_loader import load_train_data, load_test_data"
|
| 21 |
]
|
| 22 |
},
|
| 23 |
{
|
src/utils/upload_dataset.py
ADDED
|
@@ -0,0 +1,36 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
from datasets import Dataset, Features, Sequence, Value
import os
import nibabel as nib

# Directory containing the raw NIfTI (.nii.gz) volumes to upload.
raw_data_path = './data/raw'


def load_nifti(file_path):
    """Load a NIfTI file and return its voxel data as a numpy array.

    Args:
        file_path: Path to a ``.nii.gz`` (or ``.nii``) file.

    Returns:
        The image's floating-point voxel array (``nibabel`` ``get_fdata()``).
    """
    nifti = nib.load(file_path)
    return nifti.get_fdata()


def build_dataset(data_dir=raw_data_path):
    """Scan *data_dir* for ``.nii.gz`` volumes and build a ``Dataset``.

    Args:
        data_dir: Directory to scan (defaults to ``raw_data_path``).

    Returns:
        A ``datasets.Dataset`` with columns ``file_name`` (str) and
        ``data`` (3-D float32 volume, stored as nested lists).
    """
    file_names = []
    volumes = []
    # sorted() makes the example order deterministic across filesystems.
    for file_name in sorted(os.listdir(data_dir)):
        if file_name.endswith('.nii.gz'):
            file_path = os.path.join(data_dir, file_name)
            file_names.append(file_name)
            # NOTE: .tolist() materializes the whole volume in Python lists,
            # which is memory-heavy for large scans but keeps serialization simple.
            volumes.append(load_nifti(file_path).tolist())

    features = Features({
        'file_name': Value('string'),
        # A 3-D volume must be declared as nested sequences: a bare
        # Value('float32') is a scalar feature and cannot hold a list,
        # so Dataset.from_dict would fail at build time.
        # (assumes every volume is 3-D — TODO confirm against the raw data)
        'data': Sequence(Sequence(Sequence(Value('float32')))),
    })

    return Dataset.from_dict(
        {'file_name': file_names, 'data': volumes},
        features=features,
    )


if __name__ == "__main__":
    # Build lazily here (not at import time) so importing this module
    # has no side effects, then push the dataset to the Hugging Face Hub.
    dataset = build_dataset()
    dataset.push_to_hub("ashutosh-pathak/liver-segmentation")