Switch to secure AWS S3 pre-signed URLs; load all TIFFs from S3
Browse files- .gitignore +2 -1
- inference.py +23 -4
- requirements.txt +1 -0
.gitignore
CHANGED
|
@@ -3,4 +3,5 @@ __pycache__
|
|
| 3 |
.DS_Store
|
| 4 |
data/*
|
| 5 |
!data/dataset_info.json
|
| 6 |
-
!data/chips_stats.yaml
|
|
|
|
|
|
| 3 |
.DS_Store
|
| 4 |
data/*
|
| 5 |
!data/dataset_info.json
|
| 6 |
+
!data/chips_stats.yaml
|
| 7 |
+
.env
|
inference.py
CHANGED
|
@@ -11,9 +11,14 @@ import numpy as np
|
|
| 11 |
from rasterio.features import shapes
|
| 12 |
from shapely.geometry import shape
|
| 13 |
import geopandas as gpd
|
|
|
|
|
|
|
| 14 |
|
| 15 |
from messis.messis import LogConfusionMatrix
|
| 16 |
|
|
|
|
|
|
|
|
|
|
| 17 |
class InferenceDataLoader:
|
| 18 |
def __init__(self, features_path, labels_path, field_ids_path, stats_path, window_size=224, n_timesteps=3, fold_indices=None, debug=False):
|
| 19 |
self.features_path = features_path
|
|
@@ -198,11 +203,25 @@ def crop_predictions_to_gdf(field_ids, targets, predictions, transform, crs, cla
|
|
| 198 |
|
| 199 |
return gdf
|
| 200 |
|
| 201 |
-
def
|
| 202 |
-
|
| 203 |
-
|
| 204 |
-
|
|
|
|
| 205 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 206 |
stats_path = "./data/chips_stats.yaml"
|
| 207 |
dataset_info_path = "./data/dataset_info.json"
|
| 208 |
|
|
|
|
| 11 |
from rasterio.features import shapes
|
| 12 |
from shapely.geometry import shape
|
| 13 |
import geopandas as gpd
|
| 14 |
+
import boto3
|
| 15 |
+
from dotenv import load_dotenv
|
| 16 |
|
| 17 |
from messis.messis import LogConfusionMatrix
|
| 18 |
|
| 19 |
+
# Load environment variables from .env file
|
| 20 |
+
load_dotenv()
|
| 21 |
+
|
| 22 |
class InferenceDataLoader:
|
| 23 |
def __init__(self, features_path, labels_path, field_ids_path, stats_path, window_size=224, n_timesteps=3, fold_indices=None, debug=False):
|
| 24 |
self.features_path = features_path
|
|
|
|
| 203 |
|
| 204 |
return gdf
|
| 205 |
|
| 206 |
+
def generate_presigned_url(bucket_name, object_key, expiration=3600, region_name='eu-central-1'):
    """Generate a pre-signed URL granting temporary read access to an S3 object.

    Args:
        bucket_name: Name of the S3 bucket containing the object.
        object_key: Key (path) of the object within the bucket.
        expiration: Lifetime of the URL in seconds (default: 3600 = 1 hour).
        region_name: AWS region of the bucket (default: 'eu-central-1',
            previously hard-coded; kept as the default for backward compatibility).

    Returns:
        A time-limited HTTPS URL string authorizing a GET of the object.
    """
    # NOTE(review): credentials are resolved by boto3's default provider chain;
    # presumably the .env file loaded via load_dotenv() at module import supplies
    # them — verify AWS_ACCESS_KEY_ID / AWS_SECRET_ACCESS_KEY are set there.
    s3_client = boto3.client('s3', region_name=region_name)
    return s3_client.generate_presigned_url(
        'get_object',
        Params={'Bucket': bucket_name, 'Key': object_key},
        ExpiresIn=expiration,
    )
|
| 211 |
|
| 212 |
+
def perform_inference(lon, lat, model, config, debug=False):
|
| 213 |
+
# Bucket and object paths
|
| 214 |
+
bucket_name = 'messis-demo'
|
| 215 |
+
features_key = 'stacked_features_cog.tif'
|
| 216 |
+
labels_key = 'labels_cog.tif'
|
| 217 |
+
field_ids_key = 'field_ids_cog.tif'
|
| 218 |
+
|
| 219 |
+
# Generate pre-signed URLs for the GeoTIFF files
|
| 220 |
+
features_path = generate_presigned_url(bucket_name, features_key)
|
| 221 |
+
labels_path = generate_presigned_url(bucket_name, labels_key)
|
| 222 |
+
field_ids_path = generate_presigned_url(bucket_name, field_ids_key)
|
| 223 |
+
|
| 224 |
+
# Local paths for stats and dataset info
|
| 225 |
stats_path = "./data/chips_stats.yaml"
|
| 226 |
dataset_info_path = "./data/dataset_info.json"
|
| 227 |
|
requirements.txt
CHANGED
|
@@ -20,3 +20,4 @@ numpy==1.26.4
|
|
| 20 |
lion-pytorch==0.2.2
|
| 21 |
timm==0.9.16
|
| 22 |
pyproj
|
|
|
|
|
|
| 20 |
lion-pytorch==0.2.2
|
| 21 |
timm==0.9.16
|
| 22 |
pyproj
|
| 23 |
+
boto3
|