Datasets:

Languages:
English
License:
Genevski committed on
Commit
bb2c829
·
1 Parent(s): 6995d28

tests and metadata

Browse files
plant_spacing_in_a_row.csv ADDED
@@ -0,0 +1,32 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ plant_spacing
2
+ 23
3
+ 26
4
+ 28
5
+ 18
6
+ 25
7
+ 21
8
+ 26
9
+ 24
10
+ 15
11
+ 27
12
+ 27
13
+ 20
14
+ 27
15
+ 27
16
+ 21
17
+ 25
18
+ 31
19
+ 14
20
+ 34
21
+ 31
22
+ 20
23
+ 30
24
+ 18
25
+ 27
26
+ 24
27
+ 22
28
+ 28
29
+ 21
30
+ 24
31
+ 22
32
+ 22
requirements.txt ADDED
@@ -0,0 +1 @@
 
 
1
+ numpy
targeted_plant_density.txt ADDED
@@ -0,0 +1,5 @@
 
 
 
 
 
 
1
+ row spacing: 70cm
2
+ plant spacing: 24cm (avg 24.13 out of 31 distances, see plant_spacing_in_a_row.csv)
3
+ rows per 1000m 1428.571429
4
+ plants in a 1000m row 4166.666667
5
+ plants/ha: ~60k (59524.80952)
test_dataset.py ADDED
@@ -0,0 +1,310 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import re
2
+ import datetime
3
+ import pytest
4
+ import numpy as np
5
+ import pandas as pd
6
+ import rasterio
7
+
8
+ from pathlib import Path
9
+ from PIL import Image
10
+ from PIL.ExifTags import TAGS, GPSTAGS
11
+ from numbers import Rational
12
+ from math import isclose
13
+
14
+ from geopy import distance
15
+ from sklearn.neighbors import NearestNeighbors
16
+
17
# Number of sampled locations per low-altitude flight
# (each location yields one JPG plus five multispectral TIFs).
NUM_LOW_ALT_POINTS = 48
# Expected number of JPGs in an "aerial" survey folder (a couple of extras are tolerated).
NUM_AERIAL_POINTS = 378
# Field ground elevation in meters; height folder names (e.g. "5m") are relative to it.
FIELD_ALTITUDE = 580
# Captures the numeric part of DJI image file names such as "DJI_0042.JPG".
NUMBER_REGEX = re.compile(r'DJI_(\d*)')
21
+
22
@pytest.fixture
def expected_low_alt_dirs():
    """Map each flight date to the set of expected low-altitude height folders.

    Values are sets of folder names (e.g. "5m") compared via set equality
    against the directory names found on disk.  14.05.2025 has no
    low-altitude flights; its value was the empty dict literal ``{}``,
    which is type-inconsistent with the other entries — changed to
    ``set()``.  Behavior is unchanged: that date is skipped before any
    comparison in test_lowalt_folder_integrity, and the other consumer
    only takes ``len()`` of this mapping.
    """
    return {
        "14.05.2025": set(),
        "19.05.2025": {"3m", "5m"},
        "02.06.2025": {"5m"},
        "17.06.2025": {"10m"},
    }
30
+
31
@pytest.fixture
def expected_reconstructed_files():
    """Relative paths of reconstruction outputs expected under every terra 'map' dir."""
    band_suffixes = ["", "_Blue", "_Green", "_NIR", "_Red", "_RedEdge"]
    vegetation_indices = ["GNDVI", "LCI", "NDRE", "NDVI", "OSAVI"]

    expected = ["dsm.tif"]
    expected += [f"result{suffix}.tif" for suffix in band_suffixes]
    expected += [f"index_map/{idx}.tif" for idx in vegetation_indices]
    expected += [f"index_map_color/{idx}_local.tif" for idx in vegetation_indices]
    return expected
52
+
53
+
54
@pytest.fixture
def dates():
    """All date-named directories (glob pattern '*.2025') in the repository root."""
    return list(filter(Path.is_dir, Path('.').glob("*.2025")))
58
+
59
+
60
@pytest.fixture
def expected_coordinates():
    """Reference (latitude, longitude) pairs for the low-altitude capture points.

    Obtained from clustering (per the variable name); each low-altitude JPG's
    GPS position is matched against its nearest entry here.  Shape is
    (NUM_LOW_ALT_POINTS, 2), verified by the assert below.
    """
    points_from_clustering = np.array([
        [42.68728947222222, 23.551391194444445],
        [42.68753313888889, 23.55158558333333],
        [42.68719561111111, 23.551736694444447],
        [42.68777580555555, 23.55177588888889],
        [42.68742836111111, 23.55191866666667],
        [42.68800580555556, 23.551961305555555],
        [42.68706686111111, 23.552049944444445],
        [42.68766652777777, 23.55210197222222],
        [42.68824475, 23.55214391666667],
        [42.68733511111111, 23.552177111111117],
        [42.68790061111111, 23.552279416666668],
        [42.688478277777776, 23.552312611111116],
        [42.68757630555555, 23.552349083333333],
        [42.68693769444444, 23.552375583333333],
        [42.68813625, 23.55246508333333],
        [42.688717277777776, 23.552487361111112],
        [42.68720997222222, 23.55250088888889],
        [42.68781238888889, 23.55252313888889],
        [42.68836805555555, 23.552659472222224],
        [42.68743805555555, 23.55266425],
        [42.68895875, 23.552668944444445],
        [42.68679780555555, 23.552695611111112],
        [42.68804525, 23.55271505555556],
        [42.68708316666666, 23.55283188888889],
        [42.68767447222222, 23.55284063888889],
        [42.68860108333333, 23.55284225],
        [42.68827408333333, 23.55289391666667],
        [42.688826166666665, 23.55298816666667],
        [42.68665552777777, 23.55300711111111],
        [42.68790877777777, 23.55302052777778],
        [42.68732319444444, 23.55302491666667],
        [42.68847563888889, 23.55316630555556],
        [42.68688913888889, 23.55318988888889],
        [42.688146277777776, 23.553206972222224],
        [42.68755158333333, 23.553219694444444],
        [42.68869205555555, 23.55330775],
        [42.68712933333333, 23.553369944444444],
        [42.687776166666666, 23.553416861111117],
        [42.688352888888886, 23.553481416666667],
        [42.68736447222222, 23.55354133333333],
        [42.68800347222222, 23.55361427777778],
        [42.68855961111111, 23.55363263888889],
        [42.68760519444445, 23.553714527777775],
        [42.688229666666665, 23.553808916666668],
        [42.68784438888888, 23.55391138888889],
        [42.68843561111111, 23.553958055555555],
        [42.688079305555554, 23.554097472222224],
        [42.68830763888889, 23.55428075],
    ])

    # Guard against accidental edits to the literal above.
    assert points_from_clustering.shape == (NUM_LOW_ALT_POINTS, 2)
    return points_from_clustering
115
+
116
+
117
def degrees_to_decimal(degrees: Rational, minutes: Rational, seconds: Rational, direction: str):
    """Convert a degrees/minutes/seconds GPS coordinate to signed decimal degrees.

    West and South hemispheres yield a negative value; anything else is positive.
    """
    # PIL.TiffImagePlugin.IFDRational is a subtype of Rational, so raw EXIF values pass.
    for component in (degrees, minutes, seconds):
        assert isinstance(component, Rational)

    magnitude = float(degrees) + float(minutes) / 60 + float(seconds) / 3600
    sign = -1 if direction in ['W', 'S'] else 1
    return magnitude * sign
126
+
127
+
128
def get_exif_data(path):
    """Read an image's EXIF and return it with parsed timestamp and decimal GPS.

    The returned dict maps human-readable EXIF tag names to values, with
    'DateTime' parsed into a datetime and an added 'decoded_gps_info' dict
    whose GPSLatitude/GPSLongitude are converted to decimal degrees.
    Asserts that DateTime and the required GPS tags are present.
    """
    image = Image.open(path)

    exif = image.getexif()
    assert exif is not None, path

    # Translate numeric EXIF tag ids into readable names (unknown ids kept as-is).
    exif_data = {TAGS.get(tag, tag): value for tag, value in exif.items()}

    assert 'DateTime' in exif_data, path
    exif_data['DateTime'] = datetime.datetime.strptime(exif_data['DateTime'], '%Y:%m:%d %H:%M:%S')

    assert 'GPSInfo' in exif_data
    # 0x8825 is the GPS IFD pointer; decode its tag ids the same way.
    gps_info = {GPSTAGS.get(key, key): value for key, value in exif.get_ifd(0x8825).items()}

    assert 'GPSLatitude' in gps_info, path
    assert 'GPSLongitude' in gps_info, path
    assert 'GPSAltitude' in gps_info, path

    # Both keys are asserted present above, so convert them directly
    # from DMS rationals to signed decimal degrees.
    gps_info['GPSLatitude'] = degrees_to_decimal(*gps_info['GPSLatitude'], gps_info['GPSLatitudeRef'])
    gps_info['GPSLongitude'] = degrees_to_decimal(*gps_info['GPSLongitude'], gps_info['GPSLongitudeRef'])

    exif_data['decoded_gps_info'] = gps_info
    return exif_data
163
+
164
+
165
+ def test_all_dates_are_present(dates, expected_low_alt_dirs):
166
+ assert len(dates) == len(expected_low_alt_dirs), dates
167
+
168
+
169
def test_lowalt_folder_integrity(dates, expected_low_alt_dirs, expected_coordinates):
    """Validate low-altitude flight folders: structure, counts, EXIF dates, and GPS.

    For every date (except 14.05.2025, which has no low-altitude data) this
    checks that the expected height folders exist, that each holds exactly
    NUM_LOW_ALT_POINTS JPGs plus five TIFs per JPG, that every image's EXIF
    date matches the folder date and its altitude is within 2% of the folder's
    nominal height above FIELD_ALTITUDE, and that each JPG position claims a
    distinct reference point from expected_coordinates.  A points.csv summary
    is written as a side effect.
    """
    NUM_NEIGHBORS = 2
    neighbors = NearestNeighbors(n_neighbors=NUM_NEIGHBORS).fit(expected_coordinates)

    points = []
    for d in dates:
        # print(f"Testing lowalt folder integrity for {d.name}")
        if d.name == "14.05.2025":
            continue # no low altitude data for this date
        low_alt_dirs = { lad for lad in d.glob("*m") if lad.is_dir() }

        assert len(low_alt_dirs) > 0
        assert {n.name for n in low_alt_dirs} == expected_low_alt_dirs[d.name], d.name

        for low_alt_dir in low_alt_dirs:
            # Folder name like "5m" → expected absolute GPS altitude.
            altitude = int(low_alt_dir.name[:-1]) + FIELD_ALTITUDE

            jpegs = list(low_alt_dir.glob("*.JPG"))
            jpegs.sort() # needed for consistency between windows and linux (latter may fail without it)
            assert len(jpegs) == NUM_LOW_ALT_POINTS

            tifs = list(low_alt_dir.glob("*.TIF"))
            assert len(tifs) == 5 * NUM_LOW_ALT_POINTS

            # Maps each claimed reference coordinate to the image position that claimed it.
            found_coordinates = {}
            for f in jpegs:
                ed = get_exif_data(f)
                assert ed['DateTime'].strftime("%d.%m.%Y") == d.name

                gps = ed['decoded_gps_info']
                assert isclose(gps['GPSAltitude'], altitude, rel_tol=0.02), f'{low_alt_dir}, {f}'

                point = (gps['GPSLatitude'], gps['GPSLongitude'])
                indices = neighbors.kneighbors(np.expand_dims(point, 0), return_distance=False).flatten()

                # Claim the nearest still-unclaimed reference point; the for-else
                # only warns (does not fail) when both neighbors are taken.
                for i in range(NUM_NEIGHBORS):
                    nearest_point = tuple(expected_coordinates[indices[i]])
                    if nearest_point not in found_coordinates:
                        found_coordinates[nearest_point] = point
                        break
                else:
                    print((
                        f"WARN: {f} nearest point {nearest_point} for {point} is already claimed by {found_coordinates[nearest_point]}. "
                        f"Distance: {distance.distance(point, nearest_point).m}"
                    )
                    )

                assert nearest_point is not None, (f, point)
                # 5.95 m appears to be the empirical tolerance between a capture
                # position and its reference point — TODO confirm with dataset author.
                assert distance.distance(point, nearest_point).m < 5.95, (f, point, nearest_point)

                points.append(
                    {
                        'date': d.name,
                        'altitude': gps['GPSAltitude'],
                        'height': altitude - FIELD_ALTITUDE,
                        'file': str(f),
                        'geometry': point,
                        'x': point[0],
                        'y': point[1],
                        'ref_x': nearest_point[0],
                        'ref_y': nearest_point[1],
                    }
                )

                # The five TIFs for DJI_nnnn.JPG are DJI_(nnnn+1..nnnn+5).TIF.
                jpeg_number = int(NUMBER_REGEX.match(f.name).group(1))
                assert jpeg_number > 0 and jpeg_number < 9999
                tif_files = [f.parent / f"DJI_{jpeg_number + i:04d}.TIF" for i in range(1, 6)]

                for tif in tif_files:
                    assert tif.exists(), tif
                    tif_ed = get_exif_data(tif)
                    assert tif_ed['DateTime'].strftime("%d.%m.%Y") == d.name, tif
                    tif_gps = tif_ed['decoded_gps_info']

                    # expected that coords and altitude will be an exact match, but turned out there are slight differences
                    assert isclose(tif_gps['GPSAltitude'], gps['GPSAltitude'], rel_tol=1e-3), tif
                    assert isclose(tif_gps['GPSLatitude'], gps['GPSLatitude'], rel_tol=1e-7), tif
                    assert isclose(tif_gps['GPSLongitude'], gps['GPSLongitude'], rel_tol=1e-7), tif
            # Every reference point must have been claimed exactly once per height folder.
            assert len(found_coordinates) == NUM_LOW_ALT_POINTS, (d.name, low_alt_dir.name)

    pd.DataFrame(points).to_csv('points.csv', index=False)
250
+
251
+
252
+ def test_aerial_folder_integrity(dates):
253
+
254
+ for d in dates:
255
+ # print(f"Testing aerial folder integrity for {d.name}")
256
+ aerial = d / "aerial"
257
+ assert aerial.exists()
258
+
259
+ jpegs = list(aerial.glob("**/*.JPG"))
260
+ assert len(jpegs) >= NUM_AERIAL_POINTS and len(jpegs) <= NUM_AERIAL_POINTS + 2
261
+
262
+ tifs = list(aerial.glob("**/*.TIF"))
263
+ assert len(tifs) == len(jpegs) * 5
264
+
265
+ for f in jpegs + tifs:
266
+ ed = get_exif_data(f)
267
+ assert ed['DateTime'].strftime("%d.%m.%Y") == d.name
268
+
269
+
270
def test_terra_folder_integrity(dates, expected_reconstructed_files):
    """Validate reconstructed map outputs under terra/default and terra/lu for each date.

    Checks directory layout, that the private SDK log is absent, and that every
    expected GeoTIFF exists with plausible raster dimensions, the expected
    EPSG:4326 bounds (for georeferenced files), and index values within [-1, 1].
    """
    for d in dates:
        for subdir in [d / "terra/default", d / "terra/lu"]:
            assert subdir.exists()
            assert subdir.is_dir()

            # Exact layout: only 'map' and 'mission.json' at the top; only the two
            # index subfolders inside 'map', and no nested folders inside those.
            assert {f.name for f in subdir.iterdir()} == {"map", "mission.json"}, subdir
            assert {f.name for f in (subdir / "map").iterdir() if f.is_dir()} == {"index_map", "index_map_color" }, subdir / "map"
            assert {f.name for f in (subdir / "map/index_map").iterdir() if f.is_dir()} == set(), subdir / "map/index_map"
            assert {f.name for f in (subdir / "map/index_map_color").iterdir() if f.is_dir()} == set(), subdir / "map/index_map_color"
            assert (subdir / "map/SDK_Log.txt").exists() == False, subdir # privacy concerns, not part of the published dataset

            for f in [subdir / "map" / f for f in expected_reconstructed_files]:
                assert f.exists()
                assert f.is_file()
                dataset = rasterio.open(f)
                # The DSM has its own resolution, so size bounds apply to the rest only.
                if not str(f).endswith("dsm.tif"):
                    print(dataset.width, dataset.height, f) # 11597 12029
                    assert dataset.width >= 11596 and dataset.width <= 12035, f
                    assert dataset.height >=12028 and dataset.height <= 12388, f

                # Colorized "_local" files are not georeferenced; the others must
                # be in EPSG:4326 with the known field bounding box and sidecars.
                if not str(f).endswith("_local.tif"):
                    assert dataset.crs.to_epsg() == 4326, f
                    b = dataset.bounds
                    assert isclose(b.left, 23.550687255395935, rel_tol=1e-5), f
                    assert isclose(b.right, 23.554677806626483, rel_tol=1e-5), f
                    assert isclose(b.top, 42.689341982307795, rel_tol=1e-5), f
                    assert isclose(b.bottom, 42.68628839237808, rel_tol=1e-5), f
                    assert f.with_suffix(".prj").exists()
                    assert f.with_suffix(".tfw").exists()

                # "index_map_color" also contains the substring "index_map", hence both checks.
                if "index_map" in str(f) and "index_map_color" not in str(f): # vegetation indices should be in [-1, 1]
                    data = dataset.read()
                    assert np.nanmin(data) >= -1
                    assert np.nanmax(data) <= 1
305
+
306
+
307
+ def test_extra_folder_exists(dates):
308
+ for d in dates:
309
+ extra_dir = (d / "extra")
310
+ assert extra_dir.exists() == False, extra_dir.absolute() # Not part of the published dataset