chrimerss commited on
Commit
e416ff0
·
verified ·
1 Parent(s): ff777c0

Upload folder using huggingface_hub

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. .gitattributes +84 -0
  2. alpha0_usa.tif +3 -0
  3. alpha_usa.tif +3 -0
  4. b_usa.tif +3 -0
  5. beta_usa.tif +3 -0
  6. data.tar.gz +3 -0
  7. dem_usa.tif +3 -0
  8. facc_usa.tif +3 -0
  9. fdir_usa.tif +3 -0
  10. fix.py +56 -0
  11. im_usa.tif +3 -0
  12. ksat_usa.tif +3 -0
  13. leaki_usa.tif +3 -0
  14. maxunitstreamflow_to_zarr.py +458 -0
  15. streamflow_stats.npz +3 -0
  16. streamflow_stats_pixelwise.npz +3 -0
  17. wm_usa.tif +3 -0
  18. wofs_0704/subset_wofs_ALL_00_20250704_0600_0600.nc +3 -0
  19. wofs_0704/subset_wofs_ALL_01_20250704_0600_0605.nc +3 -0
  20. wofs_0704/subset_wofs_ALL_02_20250704_0600_0610.nc +3 -0
  21. wofs_0704/subset_wofs_ALL_03_20250704_0600_0615.nc +3 -0
  22. wofs_0704/subset_wofs_ALL_04_20250704_0600_0620.nc +3 -0
  23. wofs_0704/subset_wofs_ALL_05_20250704_0600_0625.nc +3 -0
  24. wofs_0704/subset_wofs_ALL_06_20250704_0600_0630.nc +3 -0
  25. wofs_0704/subset_wofs_ALL_07_20250704_0600_0635.nc +3 -0
  26. wofs_0704/subset_wofs_ALL_08_20250704_0600_0640.nc +3 -0
  27. wofs_0704/subset_wofs_ALL_09_20250704_0600_0645.nc +3 -0
  28. wofs_0704/subset_wofs_ALL_10_20250704_0600_0650.nc +3 -0
  29. wofs_0704/subset_wofs_ALL_11_20250704_0600_0655.nc +3 -0
  30. wofs_0704/subset_wofs_ALL_12_20250704_0600_0700.nc +3 -0
  31. wofs_0704/subset_wofs_ALL_13_20250704_0600_0705.nc +3 -0
  32. wofs_0704/subset_wofs_ALL_14_20250704_0600_0710.nc +3 -0
  33. wofs_0704/subset_wofs_ALL_15_20250704_0600_0715.nc +3 -0
  34. wofs_0704/subset_wofs_ALL_16_20250704_0600_0720.nc +3 -0
  35. wofs_0704/subset_wofs_ALL_17_20250704_0600_0725.nc +3 -0
  36. wofs_0704/subset_wofs_ALL_18_20250704_0600_0730.nc +3 -0
  37. wofs_0704/subset_wofs_ALL_19_20250704_0600_0735.nc +3 -0
  38. wofs_0704/subset_wofs_ALL_20_20250704_0600_0740.nc +3 -0
  39. wofs_0704/subset_wofs_ALL_21_20250704_0600_0745.nc +3 -0
  40. wofs_0704/subset_wofs_ALL_22_20250704_0600_0750.nc +3 -0
  41. wofs_0704/subset_wofs_ALL_23_20250704_0600_0755.nc +3 -0
  42. wofs_0704/subset_wofs_ALL_24_20250704_0600_0800.nc +3 -0
  43. wofs_0704/subset_wofs_ALL_25_20250704_0600_0805.nc +3 -0
  44. wofs_0704/subset_wofs_ALL_26_20250704_0600_0810.nc +3 -0
  45. wofs_0704/subset_wofs_ALL_27_20250704_0600_0815.nc +3 -0
  46. wofs_0704/subset_wofs_ALL_28_20250704_0600_0820.nc +3 -0
  47. wofs_0704/subset_wofs_ALL_29_20250704_0600_0825.nc +3 -0
  48. wofs_0704/subset_wofs_ALL_30_20250704_0600_0830.nc +3 -0
  49. wofs_0704/subset_wofs_ALL_31_20250704_0600_0835.nc +3 -0
  50. wofs_0704/subset_wofs_ALL_32_20250704_0600_0840.nc +3 -0
.gitattributes CHANGED
@@ -58,3 +58,87 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
58
  # Video files - compressed
59
  *.mp4 filter=lfs diff=lfs merge=lfs -text
60
  *.webm filter=lfs diff=lfs merge=lfs -text
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
58
  # Video files - compressed
59
  *.mp4 filter=lfs diff=lfs merge=lfs -text
60
  *.webm filter=lfs diff=lfs merge=lfs -text
61
+ alpha0_usa.tif filter=lfs diff=lfs merge=lfs -text
62
+ alpha_usa.tif filter=lfs diff=lfs merge=lfs -text
63
+ b_usa.tif filter=lfs diff=lfs merge=lfs -text
64
+ beta_usa.tif filter=lfs diff=lfs merge=lfs -text
65
+ dem_usa.tif filter=lfs diff=lfs merge=lfs -text
66
+ facc_usa.tif filter=lfs diff=lfs merge=lfs -text
67
+ fdir_usa.tif filter=lfs diff=lfs merge=lfs -text
68
+ im_usa.tif filter=lfs diff=lfs merge=lfs -text
69
+ ksat_usa.tif filter=lfs diff=lfs merge=lfs -text
70
+ leaki_usa.tif filter=lfs diff=lfs merge=lfs -text
71
+ wm_usa.tif filter=lfs diff=lfs merge=lfs -text
72
+ wofs_0704/subset_wofs_ALL_00_20250704_0600_0600.nc filter=lfs diff=lfs merge=lfs -text
73
+ wofs_0704/subset_wofs_ALL_01_20250704_0600_0605.nc filter=lfs diff=lfs merge=lfs -text
74
+ wofs_0704/subset_wofs_ALL_02_20250704_0600_0610.nc filter=lfs diff=lfs merge=lfs -text
75
+ wofs_0704/subset_wofs_ALL_03_20250704_0600_0615.nc filter=lfs diff=lfs merge=lfs -text
76
+ wofs_0704/subset_wofs_ALL_04_20250704_0600_0620.nc filter=lfs diff=lfs merge=lfs -text
77
+ wofs_0704/subset_wofs_ALL_05_20250704_0600_0625.nc filter=lfs diff=lfs merge=lfs -text
78
+ wofs_0704/subset_wofs_ALL_06_20250704_0600_0630.nc filter=lfs diff=lfs merge=lfs -text
79
+ wofs_0704/subset_wofs_ALL_07_20250704_0600_0635.nc filter=lfs diff=lfs merge=lfs -text
80
+ wofs_0704/subset_wofs_ALL_08_20250704_0600_0640.nc filter=lfs diff=lfs merge=lfs -text
81
+ wofs_0704/subset_wofs_ALL_09_20250704_0600_0645.nc filter=lfs diff=lfs merge=lfs -text
82
+ wofs_0704/subset_wofs_ALL_10_20250704_0600_0650.nc filter=lfs diff=lfs merge=lfs -text
83
+ wofs_0704/subset_wofs_ALL_11_20250704_0600_0655.nc filter=lfs diff=lfs merge=lfs -text
84
+ wofs_0704/subset_wofs_ALL_12_20250704_0600_0700.nc filter=lfs diff=lfs merge=lfs -text
85
+ wofs_0704/subset_wofs_ALL_13_20250704_0600_0705.nc filter=lfs diff=lfs merge=lfs -text
86
+ wofs_0704/subset_wofs_ALL_14_20250704_0600_0710.nc filter=lfs diff=lfs merge=lfs -text
87
+ wofs_0704/subset_wofs_ALL_15_20250704_0600_0715.nc filter=lfs diff=lfs merge=lfs -text
88
+ wofs_0704/subset_wofs_ALL_16_20250704_0600_0720.nc filter=lfs diff=lfs merge=lfs -text
89
+ wofs_0704/subset_wofs_ALL_17_20250704_0600_0725.nc filter=lfs diff=lfs merge=lfs -text
90
+ wofs_0704/subset_wofs_ALL_18_20250704_0600_0730.nc filter=lfs diff=lfs merge=lfs -text
91
+ wofs_0704/subset_wofs_ALL_19_20250704_0600_0735.nc filter=lfs diff=lfs merge=lfs -text
92
+ wofs_0704/subset_wofs_ALL_20_20250704_0600_0740.nc filter=lfs diff=lfs merge=lfs -text
93
+ wofs_0704/subset_wofs_ALL_21_20250704_0600_0745.nc filter=lfs diff=lfs merge=lfs -text
94
+ wofs_0704/subset_wofs_ALL_22_20250704_0600_0750.nc filter=lfs diff=lfs merge=lfs -text
95
+ wofs_0704/subset_wofs_ALL_23_20250704_0600_0755.nc filter=lfs diff=lfs merge=lfs -text
96
+ wofs_0704/subset_wofs_ALL_24_20250704_0600_0800.nc filter=lfs diff=lfs merge=lfs -text
97
+ wofs_0704/subset_wofs_ALL_25_20250704_0600_0805.nc filter=lfs diff=lfs merge=lfs -text
98
+ wofs_0704/subset_wofs_ALL_26_20250704_0600_0810.nc filter=lfs diff=lfs merge=lfs -text
99
+ wofs_0704/subset_wofs_ALL_27_20250704_0600_0815.nc filter=lfs diff=lfs merge=lfs -text
100
+ wofs_0704/subset_wofs_ALL_28_20250704_0600_0820.nc filter=lfs diff=lfs merge=lfs -text
101
+ wofs_0704/subset_wofs_ALL_29_20250704_0600_0825.nc filter=lfs diff=lfs merge=lfs -text
102
+ wofs_0704/subset_wofs_ALL_30_20250704_0600_0830.nc filter=lfs diff=lfs merge=lfs -text
103
+ wofs_0704/subset_wofs_ALL_31_20250704_0600_0835.nc filter=lfs diff=lfs merge=lfs -text
104
+ wofs_0704/subset_wofs_ALL_32_20250704_0600_0840.nc filter=lfs diff=lfs merge=lfs -text
105
+ wofs_0704/subset_wofs_ALL_33_20250704_0600_0845.nc filter=lfs diff=lfs merge=lfs -text
106
+ wofs_0704/subset_wofs_ALL_34_20250704_0600_0850.nc filter=lfs diff=lfs merge=lfs -text
107
+ wofs_0704/subset_wofs_ALL_35_20250704_0600_0855.nc filter=lfs diff=lfs merge=lfs -text
108
+ wofs_0704/subset_wofs_ALL_36_20250704_0600_0900.nc filter=lfs diff=lfs merge=lfs -text
109
+ wofs_0704/subset_wofs_ALL_37_20250704_0600_0905.nc filter=lfs diff=lfs merge=lfs -text
110
+ wofs_0704/subset_wofs_ALL_38_20250704_0600_0910.nc filter=lfs diff=lfs merge=lfs -text
111
+ wofs_0704/subset_wofs_ALL_39_20250704_0600_0915.nc filter=lfs diff=lfs merge=lfs -text
112
+ wofs_0704/subset_wofs_ALL_40_20250704_0600_0920.nc filter=lfs diff=lfs merge=lfs -text
113
+ wofs_0704/subset_wofs_ALL_41_20250704_0600_0925.nc filter=lfs diff=lfs merge=lfs -text
114
+ wofs_0704/subset_wofs_ALL_42_20250704_0600_0930.nc filter=lfs diff=lfs merge=lfs -text
115
+ wofs_0704/subset_wofs_ALL_43_20250704_0600_0935.nc filter=lfs diff=lfs merge=lfs -text
116
+ wofs_0704/subset_wofs_ALL_44_20250704_0600_0940.nc filter=lfs diff=lfs merge=lfs -text
117
+ wofs_0704/subset_wofs_ALL_45_20250704_0600_0945.nc filter=lfs diff=lfs merge=lfs -text
118
+ wofs_0704/subset_wofs_ALL_46_20250704_0600_0950.nc filter=lfs diff=lfs merge=lfs -text
119
+ wofs_0704/subset_wofs_ALL_47_20250704_0600_0955.nc filter=lfs diff=lfs merge=lfs -text
120
+ wofs_0704/subset_wofs_ALL_48_20250704_0600_1000.nc filter=lfs diff=lfs merge=lfs -text
121
+ wofs_0704/subset_wofs_ALL_49_20250704_0600_1005.nc filter=lfs diff=lfs merge=lfs -text
122
+ wofs_0704/subset_wofs_ALL_50_20250704_0600_1010.nc filter=lfs diff=lfs merge=lfs -text
123
+ wofs_0704/subset_wofs_ALL_51_20250704_0600_1015.nc filter=lfs diff=lfs merge=lfs -text
124
+ wofs_0704/subset_wofs_ALL_52_20250704_0600_1020.nc filter=lfs diff=lfs merge=lfs -text
125
+ wofs_0704/subset_wofs_ALL_53_20250704_0600_1025.nc filter=lfs diff=lfs merge=lfs -text
126
+ wofs_0704/subset_wofs_ALL_54_20250704_0600_1030.nc filter=lfs diff=lfs merge=lfs -text
127
+ wofs_0704/subset_wofs_ALL_55_20250704_0600_1035.nc filter=lfs diff=lfs merge=lfs -text
128
+ wofs_0704/subset_wofs_ALL_56_20250704_0600_1040.nc filter=lfs diff=lfs merge=lfs -text
129
+ wofs_0704/subset_wofs_ALL_57_20250704_0600_1045.nc filter=lfs diff=lfs merge=lfs -text
130
+ wofs_0704/subset_wofs_ALL_58_20250704_0600_1050.nc filter=lfs diff=lfs merge=lfs -text
131
+ wofs_0704/subset_wofs_ALL_59_20250704_0600_1055.nc filter=lfs diff=lfs merge=lfs -text
132
+ wofs_0704/subset_wofs_ALL_60_20250704_0600_1100.nc filter=lfs diff=lfs merge=lfs -text
133
+ wofs_0704/subset_wofs_ALL_61_20250704_0600_1105.nc filter=lfs diff=lfs merge=lfs -text
134
+ wofs_0704/subset_wofs_ALL_62_20250704_0600_1110.nc filter=lfs diff=lfs merge=lfs -text
135
+ wofs_0704/subset_wofs_ALL_63_20250704_0600_1115.nc filter=lfs diff=lfs merge=lfs -text
136
+ wofs_0704/subset_wofs_ALL_64_20250704_0600_1120.nc filter=lfs diff=lfs merge=lfs -text
137
+ wofs_0704/subset_wofs_ALL_65_20250704_0600_1125.nc filter=lfs diff=lfs merge=lfs -text
138
+ wofs_0704/subset_wofs_ALL_66_20250704_0600_1130.nc filter=lfs diff=lfs merge=lfs -text
139
+ wofs_0704/subset_wofs_ALL_67_20250704_0600_1135.nc filter=lfs diff=lfs merge=lfs -text
140
+ wofs_0704/subset_wofs_ALL_68_20250704_0600_1140.nc filter=lfs diff=lfs merge=lfs -text
141
+ wofs_0704/subset_wofs_ALL_69_20250704_0600_1145.nc filter=lfs diff=lfs merge=lfs -text
142
+ wofs_0704/subset_wofs_ALL_70_20250704_0600_1150.nc filter=lfs diff=lfs merge=lfs -text
143
+ wofs_0704/subset_wofs_ALL_71_20250704_0600_1155.nc filter=lfs diff=lfs merge=lfs -text
144
+ wofs_0704/subset_wofs_ALL_72_20250704_0600_1200.nc filter=lfs diff=lfs merge=lfs -text
alpha0_usa.tif ADDED

Git LFS Details

  • SHA256: 13c9468b8f5081b409f32d271bc61ed3dc608fe0203f06e88ff45ed5cdc07b17
  • Pointer size: 133 Bytes
  • Size of remote file: 40 MB
alpha_usa.tif ADDED

Git LFS Details

  • SHA256: a0438e9cb0c4c2c60cb4c1c14bab2b57d3d95cd8f3acefd1e776ac4e91f7febb
  • Pointer size: 133 Bytes
  • Size of remote file: 15.2 MB
b_usa.tif ADDED

Git LFS Details

  • SHA256: 221f45cf20d13fbe4e1a75a13e5363b7f1389f430b679a3fe4f58261bcc7d390
  • Pointer size: 132 Bytes
  • Size of remote file: 7.73 MB
beta_usa.tif ADDED

Git LFS Details

  • SHA256: 3181a2f98fff6ea1cc76dde87f71299efe78f97df856d10a3ff59e20c73e5ded
  • Pointer size: 133 Bytes
  • Size of remote file: 15.1 MB
data.tar.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:41a484e8e79327fd1660638dbd7394d0b8e64022ea183ae14eed5a9a9d60df8a
3
+ size 159089592376
dem_usa.tif ADDED

Git LFS Details

  • SHA256: b5e79d5013beee76f88ed69dfc2ae5c759100f142f766e1d09ea46394170007f
  • Pointer size: 133 Bytes
  • Size of remote file: 40.3 MB
facc_usa.tif ADDED

Git LFS Details

  • SHA256: 532112eb450584471ff722889b30b7f8ca0d284aa953d53665517ac660a68b5a
  • Pointer size: 133 Bytes
  • Size of remote file: 13.4 MB
fdir_usa.tif ADDED

Git LFS Details

  • SHA256: 174af97bc3dc7cd624fffa3157c34cc1464902707418a8eba447d2194d3fe293
  • Pointer size: 132 Bytes
  • Size of remote file: 7.73 MB
fix.py ADDED
@@ -0,0 +1,56 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python3
2
+ """
3
+ In-place repair for a Zarr store with mismatched time dimension lengths.
4
+
5
+ - Compares the length of the 1D `time` array with the leading dimension of
6
+ the 3D `maxunitstreamflow` array.
7
+ - Resizes the longer one down to the shorter to restore consistency.
8
+
9
+ Usage:
10
+ ml conda; conda activate credit
11
+ python /glade/work/li1995/FLASH/data/fix.py
12
+ """
13
+
14
+ from __future__ import annotations
15
+
16
+ import zarr
17
+
18
+
19
# Path to the Zarr store that this script repairs in place.
STORE_PATH = "/glade/derecho/scratch/li1995/data/maxunitstreamflow_2021_2025_10min.zarr"
20
+
21
+
22
def main() -> None:
    """Trim `time` and `maxunitstreamflow` to a common length so the store is consistent.

    Opens the store read-write, compares the length of the 1D time coordinate
    against the leading axis of the 3D data array, and shrinks whichever one is
    longer down to the shorter length. Reports sizes before and after.
    """
    store = zarr.open(STORE_PATH, mode="r+")

    times = store["time"]              # 1D, shape (T,)
    flow = store["maxunitstreamflow"]  # 3D, shape (T, Y, X)
    lat = store.get("latitude")
    lon = store.get("longitude")

    n_time = int(times.shape[0])
    n_data, n_y, n_x = (int(s) for s in flow.shape)

    print(f"Before: time={n_time}, data_time={n_data}, data_shape={flow.shape}")
    if lat is not None:
        print(f"Latitude size: {lat.shape}")
    if lon is not None:
        print(f"Longitude size: {lon.shape}")

    # Shrink whichever axis is longer down to the common minimum.
    keep = min(n_time, n_data)
    if n_data > keep:
        print(f"Resizing maxunitstreamflow from {n_data} to {keep}")
        flow.resize((keep, n_y, n_x))
    if n_time > keep:
        print(f"Resizing time from {n_time} to {keep}")
        times.resize((keep,))

    # Re-read the (possibly resized) shapes and report the final state.
    print(f"After: time={int(times.shape[0])}, data_time={int(flow.shape[0])}, data_shape={flow.shape}")
53
+
54
+
55
+ if __name__ == "__main__":
56
+ main()
im_usa.tif ADDED

Git LFS Details

  • SHA256: f5637f2c12bfb8fc3a7c1dff77cba4733d26456f47974f775c6680e6a63fed96
  • Pointer size: 133 Bytes
  • Size of remote file: 22.3 MB
ksat_usa.tif ADDED

Git LFS Details

  • SHA256: cc24d0069e623fb76f06938e602eef0055c1fcc9aaa6525e956711e69c4889ee
  • Pointer size: 133 Bytes
  • Size of remote file: 16.9 MB
leaki_usa.tif ADDED

Git LFS Details

  • SHA256: d163a6168e2cbc099c07bf19048c2aa2aee095d729c47f7ce241e2891132bf0d
  • Pointer size: 133 Bytes
  • Size of remote file: 36.1 MB
maxunitstreamflow_to_zarr.py ADDED
@@ -0,0 +1,458 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python3
2
+
3
+ """
4
+ Convert MRMS FLASH_CREST_MAXUNITSTREAMFLOW_00.00 GRIB2 tiles to a single Zarr store.
5
+
6
+ - Reads 10-minute GRIB2 files by day from the input root (e.g.,
7
+ /glade/derecho/scratch/li1995/data/FLASH_CREST_MAXUNITSTREAMFLOW_00.00/YYYYMMDD/...).
8
+ - Aligns latitude/longitude to an existing precip Zarr template that shares the grid.
9
+ - Writes float32 data with Zarr chunking (time=1, lat=438, lon=875) using parallel I/O per day.
10
+ - Appends day-by-day to keep memory bounded; can utilize larger memory (e.g., 128GB) by
11
+ increasing per-day parallelism.
12
+
13
+ Testing (login node, small sample):
14
+ ml conda; conda activate credit
15
+ python /glade/work/li1995/FLASH/data/maxunitstreamflow_to_zarr.py \
16
+ --input-root /glade/derecho/scratch/li1995/data/FLASH_CREST_MAXUNITSTREAMFLOW_00.00 \
17
+ --template-zarr /glade/derecho/scratch/li1995/data/precip_2021_2025.zarr \
18
+ --output-zarr /glade/derecho/scratch/li1995/data/maxunitstreamflow_2021_2025_10min.zarr \
19
+ --start 2021-01-01 --end 2021-01-02 \
20
+ --workers 8 --test-limit 36
21
+
22
+ Full run (batch node with more memory):
23
+ python /glade/work/li1995/FLASH/data/maxunitstreamflow_to_zarr.py \
24
+ --input-root /glade/derecho/scratch/li1995/data/FLASH_CREST_MAXUNITSTREAMFLOW_00.00 \
25
+ --template-zarr /glade/derecho/scratch/li1995/data/precip_2021_2025.zarr \
26
+ --output-zarr /glade/derecho/scratch/li1995/data/maxunitstreamflow_2021_2025_10min.zarr \
27
+ --start 2021-01-01 --end 2025-08-01 \
28
+ --workers 24
29
+ """
30
+
31
+ from __future__ import annotations
32
+
33
+ import argparse
34
+ import concurrent.futures
35
+ import datetime as dt
36
+ import os
37
+ import re
38
+ from pathlib import Path
39
+ from typing import Iterable, List, Optional, Sequence, Tuple
40
+
41
+ import numpy as np
42
+ import xarray as xr
43
+ from zarr.codecs import Zstd as ZstdCodec
44
+
45
# Avoid overly verbose warnings when opening many GRIBs
os.environ.setdefault("CFGRIB_LOGGING_LEVEL", "ERROR")


# Matches the trailing "_YYYYMMDD-HHMMSS.grib2" suffix of MRMS file names;
# group(1) is the date, group(2) is the time of day.
FILENAME_RE = re.compile(r"_([0-9]{8})-([0-9]{6})\.grib2$")
50
+
51
+
52
def parse_args() -> argparse.Namespace:
    """Define and parse the command-line interface for the converter.

    Returns:
        argparse.Namespace with input/template/output paths, the inclusive
        date window, and read/chunking tuning knobs.
    """
    parser = argparse.ArgumentParser(
        description="Build MAXUNITSTREAMFLOW Zarr from daily MRMS GRIB2 files"
    )
    # Required paths.
    parser.add_argument("--input-root", type=str, required=True,
                        help="Root directory containing YYYYMMDD/ with GRIB2 files")
    parser.add_argument("--template-zarr", type=str, required=True,
                        help="Existing precip Zarr with matching latitude/longitude")
    parser.add_argument("--output-zarr", type=str, required=True,
                        help="Output Zarr store path to create/append")
    # Date window (both endpoints inclusive).
    parser.add_argument("--start", type=str, default="2021-01-01",
                        help="Start date (YYYY-MM-DD)")
    parser.add_argument("--end", type=str, default="2025-08-01",
                        help="End date inclusive (YYYY-MM-DD)")
    # Tuning knobs.
    parser.add_argument("--workers", type=int, default=16,
                        help="Parallel workers for per-day GRIB reads")
    parser.add_argument("--test-limit", type=int, default=0,
                        help="If >0, limit number of timesteps per day for testing")
    parser.add_argument("--rechunk-lat", type=int, default=438,
                        help="Latitude chunk size")
    parser.add_argument("--rechunk-lon", type=int, default=875,
                        help="Longitude chunk size")
    return parser.parse_args()
111
+
112
+
113
def daterange(start: dt.date, end_inclusive: dt.date) -> Iterable[dt.date]:
    """Yield every calendar date from `start` through `end_inclusive`.

    Yields nothing when `end_inclusive` precedes `start`.
    """
    span_days = (end_inclusive - start).days
    for offset in range(span_days + 1):
        yield start + dt.timedelta(days=offset)
119
+
120
+
121
def list_grib2_files(day_dir: Path) -> List[Path]:
    """Return the day's ``*.grib2`` paths in sorted order ([] when the dir is missing)."""
    if not day_dir.is_dir():
        return []
    matches = [entry for entry in day_dir.iterdir() if entry.name.endswith(".grib2")]
    matches.sort()
    return matches
125
+
126
+
127
def extract_datetime_from_name(path: Path) -> Optional[np.datetime64]:
    """Parse the ``_YYYYMMDD-HHMMSS.grib2`` suffix of `path` into a datetime64.

    Returns None when the filename does not match FILENAME_RE or the matched
    digits do not form a valid timestamp.
    """
    match = FILENAME_RE.search(path.name)
    if match is None:
        return None
    stamp = match.group(1) + match.group(2)
    try:
        parsed = dt.datetime.strptime(stamp, "%Y%m%d%H%M%S")
    except Exception:
        # Digits matched the pattern but are not a real date/time.
        return None
    return np.datetime64(parsed)
137
+
138
+
139
def open_grib2_as_array(
    file_path: Path,
    target_shape: Tuple[int, int],
    missing_to_nan: bool = True,
) -> np.ndarray:
    """Open GRIB2 with cfgrib and return data as float32 array (1, y, x).

    Chooses the first data variable that has both latitude and longitude in
    dims, falling back to the first variable in the dataset. Raises ValueError
    when the array is not 2D (or 3D with a length-1 leading axis) or does not
    match `target_shape`.
    """
    ds = xr.open_dataset(
        file_path.as_posix(), engine="cfgrib", backend_kwargs={}, chunks={}
    )
    try:
        # Prefer a variable laid out on the lat/lon grid; otherwise take the first one.
        var_name = next(
            (
                name
                for name, candidate in ds.data_vars.items()
                if "latitude" in candidate.dims and "longitude" in candidate.dims
            ),
            None,
        )
        if var_name is None:
            var_name = next(iter(ds.data_vars.keys()))
        values = ds[var_name].values  # loads into memory for this tile
        if values.ndim == 2:
            plane = values
        elif values.ndim == 3 and values.shape[0] == 1:
            plane = values[0]
        else:
            raise ValueError(
                f"Unexpected data shape {values.shape} in {file_path.name} for var {var_name}"
            )
        if plane.shape != target_shape:
            raise ValueError(
                f"Array shape {plane.shape} != expected {target_shape} for {file_path.name}"
            )
        out = plane.astype(np.float32, copy=False)
        if missing_to_nan:
            # Replace typical GRIB missing sentinel with NaN if present
            # 3.4028235e+38 is float32 max
            out = np.where(out >= np.finfo(np.float32).max * 0.99, np.nan, out)
        # Prepend a length-1 time axis.
        return out[None, ...]
    finally:
        ds.close()
183
+
184
+
185
def datetime64_to_epoch_seconds(times: Sequence[np.datetime64]) -> np.ndarray:
    """Convert datetime64 values to float64 seconds since 1970-01-01.

    This avoids cftime issues and matches the precip Zarr convention.
    Returns an empty float64 array for empty input.
    """
    if not len(times):
        return np.array([], dtype=np.float64)
    # datetime64[ns] cast to int64 yields nanoseconds since the epoch; scale to seconds.
    epoch_ns = np.asarray(times, dtype="datetime64[ns]").astype(np.int64)
    return epoch_ns.astype(np.float64) / 1e9
196
+
197
+
198
def build_day_arrays(
    grib_files: Sequence[Path],
    lat_size: int,
    lon_size: int,
    workers: int,
    test_limit: int,
) -> Tuple[np.ndarray, List[np.datetime64]]:
    """Read one day's GRIB tiles in parallel → stacked (T, Y, X) float32 and time list.

    Files whose names lack a parseable timestamp are dropped; the rest are read
    in chronological order with a thread pool of `workers`. `test_limit` > 0
    truncates the file list for quick test runs.
    """
    if test_limit > 0:
        grib_files = grib_files[:test_limit]

    nothing = np.empty((0, lat_size, lon_size), dtype=np.float32)

    # Pair each file with its filename timestamp, dropping unparseable names.
    stamped = [
        (when, path)
        for path in grib_files
        if (when := extract_datetime_from_name(path)) is not None
    ]
    if not stamped:
        return nothing, []

    # Process in chronological order (stable sort keeps listing order on ties).
    stamped.sort(key=lambda pair: pair[0])
    sorted_times = [when for when, _ in stamped]
    sorted_files = [path for _, path in stamped]

    shape2d = (lat_size, lon_size)

    def _load(path: Path) -> np.ndarray:
        # One (1, Y, X) tile per file.
        return open_grib2_as_array(path, target_shape=shape2d, missing_to_nan=True)

    with concurrent.futures.ThreadPoolExecutor(max_workers=workers) as pool:
        tiles = list(pool.map(_load, sorted_files))

    if not tiles:
        return nothing, []

    return np.concatenate(tiles, axis=0), sorted_times
235
+
236
+
237
def ensure_output_store(
    output_zarr: str,
    var_name: str,
    lat: xr.DataArray,
    lon: xr.DataArray,
    chunks: Tuple[int, int, int],
    compressor: ZstdCodec,
) -> None:
    """Create the Zarr store if missing with metadata and encodings.

    Writes a zero-length-time template dataset so later day-by-day appends
    inherit the chunking, compression, and coordinate metadata. No-op when the
    store path already exists.
    """
    if Path(output_zarr).exists():
        return  # store already initialized; nothing to do

    seed = xr.DataArray(
        data=np.empty((0, lat.size, lon.size), dtype=np.float32),
        dims=("time", "latitude", "longitude"),
        coords={
            # Use float64 seconds since epoch to mirror precip store
            "time": np.array([], dtype=np.float64),
            "latitude": lat,
            "longitude": lon,
        },
        name=var_name,
        attrs={"long_name": "MAXUNITSTREAMFLOW", "units": "unknown"},
    ).to_dataset(name=var_name)

    # Add CF-like attributes for time
    seed["time"].attrs = {
        "standard_name": "time",
        "long_name": "time",
        "units": "seconds since 1970-01-01",
        "calendar": "proleptic_gregorian",
    }

    seed.to_zarr(
        output_zarr,
        mode="w",
        consolidated=True,
        encoding={
            var_name: {"dtype": "float32", "chunks": chunks, "compressors": (compressor,)},
            # store as float64 seconds
            "time": {"dtype": "float64"},
        },
        zarr_format=3,
    )
279
+
280
+
281
def append_day(
    output_zarr: str,
    var_name: str,
    day_data: np.ndarray,
    day_times: Sequence[np.datetime64],
    lat: xr.DataArray,
    lon: xr.DataArray,
    chunks: Tuple[int, int, int],
    compressor: ZstdCodec,
) -> None:
    """Append one day's (T, Y, X) block and its time coordinate to the store.

    No-op for empty `day_data`. Times are stored as float64 epoch seconds to
    match the store's template created by `ensure_output_store`.
    """
    if day_data.size == 0:
        return  # nothing to write for this day

    # Convert filename datetimes to epoch seconds (float64)
    epoch_seconds = datetime64_to_epoch_seconds(day_times)

    block = xr.DataArray(
        day_data,
        dims=("time", "latitude", "longitude"),
        coords={"time": epoch_seconds, "latitude": lat, "longitude": lon},
        attrs={"long_name": "MAXUNITSTREAMFLOW", "units": "unknown"},
    ).to_dataset(name=var_name)

    # Ensure time coordinate attributes persist on append
    block["time"].attrs = {
        "standard_name": "time",
        "long_name": "time",
        "units": "seconds since 1970-01-01",
        "calendar": "proleptic_gregorian",
    }

    # Ensure no per-variable encodings are carried into append (xarray will error if present)
    for key in (var_name, "time", "latitude", "longitude"):
        if key in block:
            block[key].encoding = {}

    block.to_zarr(
        output_zarr,
        mode="a",
        append_dim="time",
        consolidated=False,
        zarr_format=3,
    )
329
+
330
+
331
def get_existing_max_time(output_zarr: str) -> Optional[np.datetime64]:
    """Return the last timestamp already written to `output_zarr`, or None.

    Used as the resume point for incremental appends. Returns None when the
    store does not exist, has no/empty `time` coordinate, or the last value
    cannot be interpreted as a timestamp.
    """
    p = Path(output_zarr)
    if not p.exists():
        return None
    # Consolidated metadata may be stale or absent after appends; fall back
    # to the unconsolidated open on any failure.
    try:
        ds_out = xr.open_zarr(output_zarr, consolidated=True, decode_times=False)
    except Exception:
        ds_out = xr.open_zarr(output_zarr, consolidated=False, decode_times=False)
    try:
        if "time" in ds_out.coords and ds_out.sizes.get("time", 0) > 0:
            tvar = ds_out["time"]
            tvals = tvar.values
            # Times are appended in order, so the last element is the maximum
            # — NOTE(review): assumes strictly ordered appends; verify writer.
            last = tvals[-1]
            # Cases: datetime64, numeric epoch, or cftime/object
            if isinstance(last, np.datetime64):
                return last.astype("datetime64[ns]")
            if np.issubdtype(tvals.dtype, np.datetime64):
                return np.array(last, dtype="datetime64[ns]")
            if np.issubdtype(tvals.dtype, np.number):
                # decode_times=False leaves raw numbers; decode using CF units.
                units = (tvar.attrs or {}).get("units", "seconds since 1970-01-01")
                base_match = re.match(r"(seconds|milliseconds|microseconds|nanoseconds|minutes|hours|days) since (\d{4}-\d{2}-\d{2})", units)
                if base_match:
                    unit, base_date = base_match.groups()
                    base = np.datetime64(base_date, "s")
                    # Map unit to numpy timedelta unit
                    unit_map = {
                        "nanoseconds": "ns",
                        "microseconds": "us",
                        "milliseconds": "ms",
                        "seconds": "s",
                        "minutes": "m",
                        "hours": "h",
                        "days": "D",
                    }
                    np_unit = unit_map.get(unit, "s")
                    delta = np.array(last, dtype=f"timedelta64[{np_unit}]")
                    return (base + delta).astype("datetime64[ns]")
                # Fallback treat as seconds since epoch
                base = np.datetime64("1970-01-01", "s")
                delta = np.array(last, dtype="timedelta64[s]")
                return (base + delta).astype("datetime64[ns]")
            # Attempt string conversion (e.g., cftime)
            try:
                return np.datetime64(str(last)).astype("datetime64[ns]")
            except Exception:
                return None
        return None
    finally:
        # Close whichever dataset handle we opened, even on early return.
        ds_out.close()
380
+
381
+
382
def main() -> None:
    """Drive the GRIB2 → Zarr conversion day by day.

    Loads the lat/lon grid from the template store, creates the output store
    if needed, resumes after the last timestamp already written, then for each
    day in the requested window reads all GRIB tiles in parallel and appends
    them along the time dimension. Consolidates metadata at the end.
    """
    args = parse_args()

    input_root = Path(args.input_root)
    template_zarr = args.template_zarr
    output_zarr = args.output_zarr

    start_date = dt.datetime.strptime(args.start, "%Y-%m-%d").date()
    end_date = dt.datetime.strptime(args.end, "%Y-%m-%d").date()

    # Load template lat/lon from precip Zarr
    ds_tpl = xr.open_zarr(template_zarr, consolidated=True)
    try:
        lat = ds_tpl["latitude"].load()
        lon = ds_tpl["longitude"].load()
    finally:
        ds_tpl.close()

    lat_size, lon_size = int(lat.size), int(lon.size)

    var_name = "maxunitstreamflow"
    # Chunk one timestep at a time; spatial chunk sizes come from the CLI.
    chunks = (1, int(args.rechunk_lat), int(args.rechunk_lon))
    compressor = ZstdCodec(level=3)

    # Ensure store exists with metadata
    ensure_output_store(output_zarr, var_name, lat, lon, chunks, compressor)

    existing_max_time = get_existing_max_time(output_zarr)
    if existing_max_time is not None:
        print(f"Resuming after {existing_max_time}")

    for day in daterange(start_date, end_date):
        # Day directories are named YYYYMMDD under the input root.
        day_dir = input_root / day.strftime("%Y%m%d")
        gribs = list_grib2_files(day_dir)
        if not gribs:
            continue
        # Filter by resume point
        if existing_max_time is not None:
            gribs = [
                f
                for f in gribs
                if (t := extract_datetime_from_name(f)) is not None and t > existing_max_time
            ]
            if not gribs:
                continue
        print(f"Processing {day_dir} with {len(gribs)} files ...")
        day_data, day_times = build_day_arrays(
            gribs,
            lat_size=lat_size,
            lon_size=lon_size,
            workers=int(args.workers),
            test_limit=int(args.test_limit),
        )
        if day_data.size == 0:
            continue
        append_day(
            output_zarr,
            var_name,
            day_data,
            day_times,
            lat,
            lon,
            chunks,
            compressor,
        )

    # Consolidate metadata at the end for faster reads
    try:
        import zarr as zarr_pkg

        zarr_pkg.consolidate_metadata(output_zarr)
    except Exception as e:
        # Best-effort: a missing/failed consolidation only slows reads.
        print(f"Consolidate metadata skipped: {e}")
456
+
457
+ if __name__ == "__main__":
458
+ main()
streamflow_stats.npz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:9b4318e6a9251af25ab447ffaa3cd4311efccbda24b4ffb6a7b46f2b3f90c708
3
+ size 1644
streamflow_stats_pixelwise.npz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:017ef1b6d4c3a03792eb871eaad82cc0a381a34a56f3c121c28cddceda59f9a0
3
+ size 64113653
wm_usa.tif ADDED

Git LFS Details

  • SHA256: fc439fa8557ac5303de2385abfb473ee75c3aa25de0451d280bbfb6be052fde6
  • Pointer size: 133 Bytes
  • Size of remote file: 12.9 MB
wofs_0704/subset_wofs_ALL_00_20250704_0600_0600.nc ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:815dc767376a206715e54c9321d27e74f6ad783d96b9cb189dd239fa90a41559
3
+ size 11209429
wofs_0704/subset_wofs_ALL_01_20250704_0600_0605.nc ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:bec15398a203d3b4a90cd241ebe400b26bea314a7c18a9266bbbf4aef11bf267
3
+ size 14002254
wofs_0704/subset_wofs_ALL_02_20250704_0600_0610.nc ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:4a1628208e4e2b8bb8ff9f69054cb17d3726e539c5c82817e28ccc2417be8da2
3
+ size 13894159
wofs_0704/subset_wofs_ALL_03_20250704_0600_0615.nc ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:bff833df55aec541dfef420b08de29c65f3c55246c21aff1bc904eda50406834
3
+ size 13838402
wofs_0704/subset_wofs_ALL_04_20250704_0600_0620.nc ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:6dc94c29760ed5488c43b45c087b1b684bd605d13dca3333334b3d6d7ad3c2f8
3
+ size 13778022
wofs_0704/subset_wofs_ALL_05_20250704_0600_0625.nc ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:a69eaed3eb49f0be766655da9310adc8fe55d5fcb0a9610c98130d3b223f8275
3
+ size 13732094
wofs_0704/subset_wofs_ALL_06_20250704_0600_0630.nc ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:c38538fcc538a8d5a3ca6dd62d64429966c409edff510005b71a3308209b1f61
3
+ size 13690291
wofs_0704/subset_wofs_ALL_07_20250704_0600_0635.nc ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:372dae3602ab259f8432b67079e8fee69134bb11ffa1f55abee52380578e77d0
3
+ size 13657006
wofs_0704/subset_wofs_ALL_08_20250704_0600_0640.nc ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:0306775198e62dcacfd8c878ecd64c44c0efd5d7ca5ac7b99f431fa2564fe12b
3
+ size 13638117
wofs_0704/subset_wofs_ALL_09_20250704_0600_0645.nc ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:6a1758b0f2b0f2fe98b869ae308457aa21da9f7362c92058352ec90c6193779a
3
+ size 13628412
wofs_0704/subset_wofs_ALL_10_20250704_0600_0650.nc ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:22046e24f47ee558425f190524f96e34071c5c1e11b373c0ca18d08330db67fe
3
+ size 13620537
wofs_0704/subset_wofs_ALL_11_20250704_0600_0655.nc ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:933dbbeee965aad2c4c028af158d58edbaa509622976f5e5f249214a390faed3
3
+ size 13620605
wofs_0704/subset_wofs_ALL_12_20250704_0600_0700.nc ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:3ff6b413783ced2762b1abe24c8c98de078250e1bd7816d558ee5decc1517d53
3
+ size 13616473
wofs_0704/subset_wofs_ALL_13_20250704_0600_0705.nc ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:9b0660222e0f61c94c1524b68a764a992d6d7838effae33d7e6e48cdb62d215f
3
+ size 13617577
wofs_0704/subset_wofs_ALL_14_20250704_0600_0710.nc ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:8b5c5e1c38accc94cf5d0045e80ceb25608a8173158448261354f620c3087f83
3
+ size 13617353
wofs_0704/subset_wofs_ALL_15_20250704_0600_0715.nc ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:263377d3def9b8a48e18d2927e5b4a1565c8e9da3615dbb97a07e63e27001ec7
3
+ size 13616608
wofs_0704/subset_wofs_ALL_16_20250704_0600_0720.nc ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:a5924c4e53f40ba846840ce219b62a860a010b28d53e169b3556ea8e8002b9da
3
+ size 13615802
wofs_0704/subset_wofs_ALL_17_20250704_0600_0725.nc ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:c5b7435e1700684d8ee841b6ff71299d3bf1685b721b4cdd5d4945fbb37afa7c
3
+ size 13616502
wofs_0704/subset_wofs_ALL_18_20250704_0600_0730.nc ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:821e9c64ca840209748e304b8e6c2a193205be14ad6e06c018671838937c8da2
3
+ size 13613136
wofs_0704/subset_wofs_ALL_19_20250704_0600_0735.nc ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:8bde25aa5fd1841c002b7c8b926020319bdeee502960ae750a9e36fbb7b47c82
3
+ size 13609568
wofs_0704/subset_wofs_ALL_20_20250704_0600_0740.nc ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:b6d482ec9fdaa6d3fc05d589bc0b9477f445bc62a97f37c87b2c3216ca352cce
3
+ size 13607294
wofs_0704/subset_wofs_ALL_21_20250704_0600_0745.nc ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:17c8f515c901919fdc93452b6b3973760c0c008c1e87c1c0894f54b45d7c030e
3
+ size 13600734
wofs_0704/subset_wofs_ALL_22_20250704_0600_0750.nc ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:876ca489a4efe9f0236dd65497b998bd3f88a225e5dfbae54bd6e16161507ec8
3
+ size 13598526
wofs_0704/subset_wofs_ALL_23_20250704_0600_0755.nc ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:63fb37b6c50531bef5fe2acc726695c1747c2c56e19a041071ae76c08f70ba5a
3
+ size 13597106
wofs_0704/subset_wofs_ALL_24_20250704_0600_0800.nc ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:7eaa7977c4549d14138c3a22961898e035423e3e49a10629e51b5c719f512f7c
3
+ size 13592769
wofs_0704/subset_wofs_ALL_25_20250704_0600_0805.nc ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:eb45d2bc2230769ffe4597eba4a99a96b19f7478ab3cf7702898c3d67e0f75d6
3
+ size 13586760
wofs_0704/subset_wofs_ALL_26_20250704_0600_0810.nc ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:810e526aa83eaffe45ebe31c7a6ddced302b9ceef9d11399db28420531d02857
3
+ size 13581359
wofs_0704/subset_wofs_ALL_27_20250704_0600_0815.nc ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:073cd0535bc3ebffcd8767d221c19082b8121ed1937817d7e3da4f780a7f1571
3
+ size 13574901
wofs_0704/subset_wofs_ALL_28_20250704_0600_0820.nc ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:95367ffe208c7ee49e2690df96f276faf955d4766f854555e05c5365d5c62560
3
+ size 13568685
wofs_0704/subset_wofs_ALL_29_20250704_0600_0825.nc ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:a22bb0432595b3c3efceac0638f552ece4748dc03efd5f62e1bd938d0ea43633
3
+ size 13566876
wofs_0704/subset_wofs_ALL_30_20250704_0600_0830.nc ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d0b187d8118dbd906b2b7968f354cf13d74cb18b6f70d85137112fe713545e60
3
+ size 13560053
wofs_0704/subset_wofs_ALL_31_20250704_0600_0835.nc ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:9cfb5c804043dfc0e05929f4445d846c80b6a67999ab03cf1f7969b54446d4e8
3
+ size 13555534
wofs_0704/subset_wofs_ALL_32_20250704_0600_0840.nc ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:4a5ae97f58a50b1cfdd0ad7938dc8da5fa1bd983984c1e95b8acaf402d117871
3
+ size 13549326