hkayabilisim commited on
Commit
f3b2a27
·
1 Parent(s): 7fa16fb

Add AWS S3 support to save objects

Browse files
pyproject.toml CHANGED
@@ -20,8 +20,8 @@ dependencies = [
20
  "networkx",
21
  "openpyxl",
22
  "rasterio",
23
- "xml",
24
  "boto3",
 
25
  ]
26
 
27
  [tool.hatch.version]
 
20
  "networkx",
21
  "openpyxl",
22
  "rasterio",
 
23
  "boto3",
24
+ "cryptography",
25
  ]
26
 
27
  [tool.hatch.version]
tomorrowcities/content/articles/welcome.md CHANGED
@@ -17,7 +17,8 @@ category:
17
  TCDSE is a web application designed to conduct computational tasks to generate information needed for decision mechanisms in designing future cities. The web application, which will be referred as TCDSE for short, contains a computational engine capable of executing several hazard scenarios on different exposure datasets and infrastructures.
18
 
19
  ## What is New?
20
- * **New** The engine now can parse, display vulnerability curves located on [Global Vulnerability Model Reposity](https://github.com/gem/global_vulnerability_model) maintained by [Global Earthquake Model Foundation)](https://www.globalquakemodel.org/gem). To see the new fatures, download one of the XML files in [Global Vulnerability Model Reposity](https://github.com/gem/global_vulnerability_model) and drag and drop to [Engine](/engine). The engine will read all the vulnerability functions defined in the XML file and display them.
 
21
  * basemap is changed to ESri.WorldImagery to see the landscapes especially rivers.
22
  * utilities page is added.
23
  * Excel to GeoJSON converter is added to utilities page.
 
17
  TCDSE is a web application designed to conduct computational tasks to generate information needed for decision mechanisms in designing future cities. The web application, which will be referred as TCDSE for short, contains a computational engine capable of executing several hazard scenarios on different exposure datasets and infrastructures.
18
 
19
  ## What is New?
20
+ * **New** Amazon S3 support to save session or policy database.
21
+ * The engine now can parse, display vulnerability curves located on [Global Vulnerability Model Repository](https://github.com/gem/global_vulnerability_model) maintained by [Global Earthquake Model Foundation](https://www.globalquakemodel.org/gem). To see the new features, download one of the XML files in [Global Vulnerability Model Repository](https://github.com/gem/global_vulnerability_model) and drag and drop to [Engine](/engine). The engine will read all the vulnerability functions defined in the XML file and display them.
22
  * basemap is changed to Esri.WorldImagery to see the landscapes, especially rivers.
23
  * utilities page is added.
24
  * Excel to GeoJSON converter is added to utilities page.
tomorrowcities/pages/__init__.py CHANGED
@@ -10,7 +10,7 @@ from cryptography.fernet import Fernet
10
 
11
  from ..data import articles
12
 
13
- route_order = ["/", "docs","engine","utilities","settings","account"]
14
 
15
  def check_auth(route, children):
16
  # This can be replaced by a custom function that checks if the user is
@@ -18,9 +18,8 @@ def check_auth(route, children):
18
 
19
  # routes that are public or only for admin
20
  # the rest only requires login
21
- public_paths = ["/","docs","engine","utilities","account"]
22
- admin_paths = ["settings"]
23
-
24
 
25
  if route.path in public_paths:
26
  children_auth = children
@@ -34,121 +33,6 @@ def check_auth(route, children):
34
  children_auth = children
35
  return children_auth
36
 
37
-
38
- class S3Storage:
39
- def __init__(self, aws_access_key_id, aws_secret_access_key, region_name, bucket_name):
40
- self.aws_access_key_id = aws_access_key_id
41
- self.aws_secret_access_key = aws_secret_access_key
42
- self.region_name = region_name
43
- self.bucket_name = bucket_name
44
- self.s3 = self.connect()
45
- def connect(self):
46
- session = boto3.Session(
47
- aws_access_key_id=self.aws_access_key_id,
48
- aws_secret_access_key=self.aws_secret_access_key,
49
- region_name=self.region_name
50
- )
51
- return session.client('s3')
52
-
53
- def is_alive(self):
54
- try:
55
- buckets = self.s3.list_buckets()
56
- for bucket in buckets['Buckets']:
57
- if bucket['Name'] == self.bucket_name:
58
- return True
59
- return False
60
- except:
61
- return False
62
-
63
- def upload_file(self, file_name, object_name=None):
64
- if object_name is None:
65
- object_name = file_name
66
- self.s3.upload_file(file_name, self.bucket_name, object_name)
67
- return f"https://{self.bucket_name}.s3.amazonaws.com/{object_name}"
68
-
69
- def load_metadata(self, session_name):
70
- # Use the get_object method to read the file
71
- self.s3.download_file(self.bucket_name, f'{session_name}.metadata', f'/tmp/{session_name}.metadata')
72
-
73
- with open(f'/tmp/{session_name}.metadata', 'rb') as fileObj:
74
- # Access the content of the file from the response
75
- metadata = pickle.load(fileObj)
76
-
77
- print(type(metadata))
78
-
79
- return metadata
80
-
81
- def load_object(self, object_name):
82
- # Use the get_object method to read the file
83
- self.s3.download_file(self.bucket_name, object_name, f'/tmp/{object_name}')
84
- print(f'Downloading {object_name}')
85
-
86
- with open(f'/tmp/{object_name}', 'rb') as fileObj:
87
- # Access the content of the file from the response
88
- data = pickle.load(fileObj)
89
- return data
90
-
91
- def load_data(self, session_name):
92
- return self.load_object(f'{session_name}.data')
93
-
94
- def list_objects(self):
95
- objects = self.s3.list_objects(Bucket=self.bucket_name)
96
- objects_array = set()
97
- for obj in objects.get('Contents', []):
98
- objects_array.add(obj["Key"].split('.')[:-1][0])
99
- return [a for a in objects_array]
100
-
101
- def list_sessions(self):
102
- objects = self.list_objects()
103
- return [o for o in objects if "TCDSE_SESSION" in o]
104
-
105
- def get_S3(master_key:str, aws_access_key_id: str, aws_secret_access_key: str, region_name: str, bucket_name: str):
106
- fernet = Fernet(master_key)
107
-
108
- dec_aws_access_key_id = fernet.decrypt(aws_access_key_id).decode()
109
- dec_aws_secret_access_key = fernet.decrypt(aws_secret_access_key).decode()
110
- dec_region_name = fernet.decrypt(region_name).decode()
111
- dec_bucket_name = fernet.decrypt(bucket_name).decode()
112
- s3 = S3Storage(dec_aws_access_key_id, dec_aws_secret_access_key, dec_region_name, dec_bucket_name)
113
- s3.connect()
114
- return s3
115
-
116
- def revive_storage():
117
- if 'master_key' in os.environ:
118
- print('reading storage from env')
119
- return get_S3(os.environ['master_key'],
120
- os.environ['aws_access_key_id'],
121
- os.environ['aws_secret_access_key'],
122
- os.environ['region_name'],
123
- os.environ['bucket_name'])
124
- return None
125
-
126
-
127
- storage = solara.reactive(cast(Optional[S3Storage], revive_storage()))
128
-
129
-
130
-
131
-
132
-
133
-
134
- def storage_control(master_key:str, aws_access_key_id: str, aws_secret_access_key: str, region_name: str, bucket_name: str):
135
- fernet = Fernet(master_key)
136
-
137
- dec_aws_access_key_id = fernet.decrypt(aws_access_key_id).decode()
138
- dec_aws_secret_access_key = fernet.decrypt(aws_secret_access_key).decode()
139
- dec_region_name = fernet.decrypt(region_name).decode()
140
- dec_bucket_name = fernet.decrypt(bucket_name).decode()
141
- storage.value = S3Storage(dec_aws_access_key_id, dec_aws_secret_access_key, dec_region_name, dec_bucket_name)
142
- storage.value.connect()
143
- os.environ['master_key'] = master_key
144
- os.environ['aws_access_key_id'] = aws_access_key_id
145
- os.environ['aws_secret_access_key'] = aws_secret_access_key
146
- os.environ['region_name'] = region_name
147
- os.environ['bucket_name'] = bucket_name
148
-
149
- def storage_disconnect():
150
- storage.value = None
151
-
152
  @dataclasses.dataclass
153
  class User:
154
  username: str
@@ -179,8 +63,6 @@ def LoginForm():
179
  solara.Markdown(
180
  """
181
  This is an example login form.
182
-
183
- * use admin/admin to login as admin.
184
  * use test/test to login as a normal user.
185
  """
186
  )
@@ -208,12 +90,6 @@ def Layout(children=[]):
208
  with solara.lab.Tabs(align="center"):
209
  for route in routes:
210
  name = route.path if route.path != "/" else "Welcome"
211
- is_admin = user.value and user.value.admin
212
- # we could skip the admin tab if the user is not an admin
213
- if route.path == "settings" and not is_admin:
214
- continue
215
- if user.value is not None and route.path == "logon":
216
- continue
217
  # in this case we disable the tab
218
  solara.lab.Tab(name, path_or_route=route, disabled=False)
219
  if user.value:
 
10
 
11
  from ..data import articles
12
 
13
+ route_order = ["/", "docs","engine","utilities","policies","settings","account"]
14
 
15
  def check_auth(route, children):
16
  # This can be replaced by a custom function that checks if the user is
 
18
 
19
  # routes that are public or only for admin
20
  # the rest only requires login
21
+ public_paths = ["/","docs","engine","utilities","policies","settings","account"]
22
+ admin_paths = [""]
 
23
 
24
  if route.path in public_paths:
25
  children_auth = children
 
33
  children_auth = children
34
  return children_auth
35
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
36
  @dataclasses.dataclass
37
  class User:
38
  username: str
 
63
  solara.Markdown(
64
  """
65
  This is an example login form.
 
 
66
  * use test/test to login as a normal user.
67
  """
68
  )
 
90
  with solara.lab.Tabs(align="center"):
91
  for route in routes:
92
  name = route.path if route.path != "/" else "Welcome"
 
 
 
 
 
 
93
  # in this case we disable the tab
94
  solara.lab.Tab(name, path_or_route=route, disabled=False)
95
  if user.value:
tomorrowcities/pages/account.py CHANGED
@@ -5,55 +5,10 @@ import pprint
5
 
6
  from . import user
7
  from . import LoginForm
8
- from . import storage, storage_control, storage_disconnect
9
- from .engine import layers, load_from_state
10
 
11
  @solara.component
12
- def Page(name: Optional[str] = None, page: int = 0, page_size=100):
13
- aws_access_key_id = solara.use_reactive('gAAAAABlNYcieCqS_iKRhX_J4LXea2hZ7UOqml4ebclJuGJaNpf0h_vOIYiYqLvPmHly8y8hcxBYr-_YUroLb5UG95xpPYTMjEhH_roobgY7hd7hTxJ9mB4=')
14
- aws_secret_access_key = solara.use_reactive('gAAAAABlNYeL1SvCXf6DePlh2I6U_rrd9Izv1QlR6U9eev00bloKdUApf1r29l3UUh6G1PS9DagGLF228f3peWCrWdgZgT_-gUV3ueV6nHR9_QERYCWr0iaAUpcmqPVzIy81ESHqNZ2Z')
15
- master_key = solara.use_reactive('')
16
- region_name = solara.use_reactive('gAAAAABlNYfC41QcaN-_OwD0XwHGOY8BD38YtNXlvd8dWT74aUoLeERtw1OADP_jIKMqDAqvRMnoioqHONI6-um3NCeKlbG2rw==')
17
- bucket_name = solara.use_reactive('gAAAAABlNYfYJwQCh6S9bO2npj7Qd1r9riEsGHxw6LAK5xz1DreQv7cpHdmepFtLhB8DlNDEKRDsRsFXD9zRPVMMc5JDcYiMQQ==')
18
-
19
- session_name = solara.use_reactive(None)
20
- session_list = solara.use_reactive([])
21
-
22
-
23
-
24
-
25
-
26
- def load_selected_session():
27
- data = storage.value.load_data(session_name.value)
28
- pprint.pprint(data)
29
- load_from_state(data)
30
-
31
- def refresh_session_list():
32
- session_list.set(sorted(storage.value.list_sessions(),reverse=True))
33
-
34
- #for k in ['aws_access_key_id', 'aws_secret_access_key','region_name','bucket_name']:
35
- # solara.Text(f'{k} --> {os.environ[k]}')
36
  solara.Title("TCDSE » Account")
37
  if user.value is None:
38
  LoginForm()
39
- else:
40
- if storage.value is None:
41
- with solara.Card(title='Attaching AWS S3',subtitle='Please attach an S3 bucket to save workspace sessions'):
42
- solara.InputText(label='Master Key',value=master_key, password=True)
43
- solara.InputText(label='AWS Access Key ID', value=aws_access_key_id, password=True,disabled=True)
44
- solara.InputText(label='AWS Secret Access Key', value=aws_secret_access_key,password=True,disabled=True)
45
- solara.InputText(label='AWS Region Name', value=region_name,password=True,disabled=True)
46
- solara.InputText(label='Bucket Name', value=bucket_name,password=True,disabled=True)
47
- solara.Button(label="Connect to S3",
48
- on_click=lambda: storage_control(master_key.value, aws_access_key_id.value,
49
- aws_secret_access_key.value,
50
- region_name.value,
51
- bucket_name.value))
52
- else:
53
- with solara.Card(title='Load Session', subtitle='Choose a session from storage'):
54
- solara.Select(label='Choose session',value=session_name.value, values=session_list.value,
55
- on_value=session_name.set)
56
- solara.Button(label="Load", on_click=lambda: load_selected_session())
57
- solara.Button(label="Refresh", on_click=lambda: refresh_session_list())
58
- solara.Button(label="Disconnect from S3",on_click=lambda: storage_disconnect())
59
 
 
5
 
6
  from . import user
7
  from . import LoginForm
 
 
8
 
9
  @solara.component
10
+ def Page():
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
11
  solara.Title("TCDSE » Account")
12
  if user.value is None:
13
  LoginForm()
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
14
 
tomorrowcities/pages/engine.py CHANGED
@@ -18,7 +18,7 @@ import logging, sys
18
  #logging.basicConfig(stream=sys.stderr, level=logging.INFO)
19
  import pickle
20
  import datetime
21
- from . import storage, storage_control, storage_disconnect
22
  from ..backend.engine import compute, compute_power_infra, calculate_metrics
23
 
24
 
@@ -793,7 +793,7 @@ def MapViewer():
793
  zoom, set_zoom = solara.use_state(default_zoom)
794
  #center, set_center = solara.use_state(default_center)
795
 
796
- base_map = ipyleaflet.basemaps["Stamen"]["Watercolor"]
797
  base_layer = ipyleaflet.TileLayer.element(url=base_map.build_url())
798
  map_layers = [base_layer]
799
 
 
18
  #logging.basicConfig(stream=sys.stderr, level=logging.INFO)
19
  import pickle
20
  import datetime
21
+ from .settings import storage
22
  from ..backend.engine import compute, compute_power_infra, calculate_metrics
23
 
24
 
 
793
  zoom, set_zoom = solara.use_state(default_zoom)
794
  #center, set_center = solara.use_state(default_center)
795
 
796
+ base_map = ipyleaflet.basemaps["Esri"]["WorldStreetMap"]
797
  base_layer = ipyleaflet.TileLayer.element(url=base_map.build_url())
798
  map_layers = [base_layer]
799
 
tomorrowcities/pages/policies.py ADDED
@@ -0,0 +1,5 @@
 
 
 
 
 
 
1
+ import solara
2
+
3
+ @solara.component
4
+ def Page():
5
+ solara.Markdown('Policies')
tomorrowcities/pages/settings.py CHANGED
@@ -1,8 +1,167 @@
1
  import solara
 
 
 
 
2
 
3
  from . import user
4
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
5
  @solara.component
6
- def Page():
7
- assert user.value is not None
8
- solara.Markdown(f"Hi {user.value.username}, you are an admin")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
  import solara
2
+ from typing import Optional, cast
3
+ import pickle
4
+ import boto3
5
+ import os
6
 
7
  from . import user
8
 
9
class S3Storage:
    """Thin wrapper around an S3 bucket used to persist TCDSE sessions.

    Sessions are stored as ``<session_name>.data`` / ``<session_name>.metadata``
    pickle objects. NOTE(review): ``pickle.load`` on downloaded objects executes
    arbitrary code if the bucket is writable by untrusted parties — confirm the
    bucket's access policy.
    """

    def __init__(self, aws_access_key_id, aws_secret_access_key, region_name, bucket_name):
        self.aws_access_key_id = aws_access_key_id
        self.aws_secret_access_key = aws_secret_access_key
        self.region_name = region_name
        self.bucket_name = bucket_name
        # Eagerly create the boto3 client so a malformed region fails early.
        self.s3 = self._connect()

    def _connect(self):
        """Create a boto3 S3 client from the stored credentials."""
        session = boto3.Session(
            aws_access_key_id=self.aws_access_key_id,
            aws_secret_access_key=self.aws_secret_access_key,
            region_name=self.region_name,
        )
        return session.client('s3')

    def is_alive(self):
        """Return True when the configured bucket exists and is reachable.

        Security fix: never print the credentials. Any API failure (bad keys,
        no network) reports the storage as not alive instead of propagating.
        """
        try:
            buckets = self.s3.list_buckets()
        except Exception:  # boto3 raises many client/network error types
            return False
        return any(b['Name'] == self.bucket_name for b in buckets.get('Buckets', []))

    def upload_file(self, file_name, object_name=None):
        """Upload *file_name* to the bucket and return its public https URL.

        When *object_name* is omitted the local file name is used as the key.
        """
        if object_name is None:
            object_name = file_name
        self.s3.upload_file(file_name, self.bucket_name, object_name)
        return f"https://{self.bucket_name}.s3.amazonaws.com/{object_name}"

    def load_metadata(self, session_name):
        """Download and unpickle ``<session_name>.metadata``."""
        return self.load_object(f'{session_name}.metadata')

    def load_object(self, object_name):
        """Download *object_name* into /tmp and return the unpickled content."""
        local_path = f'/tmp/{object_name}'
        self.s3.download_file(self.bucket_name, object_name, local_path)
        print(f'Downloading {object_name}')
        with open(local_path, 'rb') as file_obj:
            return pickle.load(file_obj)

    def load_data(self, session_name):
        """Download and unpickle ``<session_name>.data``."""
        return self.load_object(f'{session_name}.data')

    def list_objects(self):
        """Return the distinct object base names with the extension stripped.

        Fix: the previous ``key.split('.')[:-1][0]`` kept only the first
        dot-separated part (dropping the middle of multi-dot keys) and raised
        IndexError for keys without any dot. ``rsplit('.', 1)[0]`` strips
        exactly one trailing extension and is safe for dot-less keys.
        """
        objects = self.s3.list_objects(Bucket=self.bucket_name)
        names = {obj["Key"].rsplit('.', 1)[0] for obj in objects.get('Contents', [])}
        return list(names)

    def list_sessions(self):
        """Return the stored TCDSE session names."""
        return [name for name in self.list_objects() if "TCDSE_SESSION" in name]
78
+
79
def revive_storage():
    """Re-create the S3 connection from credentials previously saved in os.environ.

    Returns None when no saved credentials are present.
    """
    env = os.environ
    if 'aws_access_key_id' not in env:
        return None
    print('reviving storage from env')
    return S3Storage(env['aws_access_key_id'],
                     env['aws_secret_access_key'],
                     env['region_name'],
                     env['bucket_name'])
87
+
88
# Global reactive handle to the attached S3 storage (None when detached);
# revived from the environment when a previous session saved credentials.
storage = solara.reactive(revive_storage())

def storage_control(aws_access_key_id: str, aws_secret_access_key: str, region_name: str, bucket_name: str):
    """Attach a new S3 storage built from the given plain-text credentials."""
    storage.value = S3Storage(
        aws_access_key_id,
        aws_secret_access_key,
        region_name,
        bucket_name,
    )
92
+
93
+
94
def storage_disconnect():
    """Detach the S3 storage: clear reactive state and forget saved credentials.

    Fixes two issues in the original: ``os.environ.pop(key)`` without a
    default raised KeyError on a second disconnect (or a partially populated
    environment), and a debug print leaked the AWS access key id to the logs.
    """
    print('Disconnecting S3')
    storage.set(None)
    session_name.set(None)
    session_list.set([])
    for key in ('aws_access_key_id', 'aws_secret_access_key',
                'region_name', 'bucket_name'):
        # Default of None makes the cleanup idempotent.
        os.environ.pop(key, None)
105
+
106
# Currently selected session and the list of sessions available in the bucket.
session_name = solara.reactive(None)
session_list = solara.reactive([])

def refresh_session_list():
    """Re-query the bucket and publish the session names, newest first."""
    names = storage.value.list_sessions()
    session_list.set(sorted(names, reverse=True))
111
+
112
@solara.component
def StorageViewer():
    """Card letting the user pick one of the sessions stored in the bucket."""
    with solara.Card(title='Load Session', subtitle='Choose a session from storage'):
        solara.Select(
            label='Choose session',
            value=session_name.value,
            values=session_list.value,
            on_value=session_name.set,
        )
        solara.Button(label="Refresh", on_click=lambda: refresh_session_list())
118
+
119
+
120
@solara.component
def Page(name: Optional[str] = None, page: int = 0, page_size=100):
    """Settings page: attach, revive, or browse the AWS S3 session storage."""
    aws_access_key_id = solara.use_reactive('')
    aws_secret_access_key = solara.use_reactive('')
    region_name = solara.use_reactive('eu-west-3')
    bucket_name = solara.use_reactive('tcdse')
    err_message, set_err_message = solara.use_state('')

    def connect_storage(aws_access_key_id,
                        aws_secret_access_key,
                        region_name,
                        bucket_name):
        """Build an S3Storage from the form values; report failures to the UI."""
        print('connecting to s3')
        try:
            s3 = S3Storage(aws_access_key_id, aws_secret_access_key, region_name, bucket_name)
            if s3.is_alive():
                storage.value = s3
                # Save credentials so revive_storage() can reconnect after a reload.
                os.environ['aws_access_key_id'] = aws_access_key_id
                os.environ['aws_secret_access_key'] = aws_secret_access_key
                os.environ['region_name'] = region_name
                os.environ['bucket_name'] = bucket_name
                set_err_message('')
            else:
                # Fix: previously a failed liveness check produced no feedback.
                set_err_message(f'Could not reach bucket "{bucket_name}". '
                                'Check the credentials, region and bucket name.')
        except Exception as e:
            set_err_message(str(e))
            print(e)

    if storage.value is None:
        s3_object = revive_storage()
        if s3_object is not None and s3_object.is_alive():
            storage.set(s3_object)
            StorageViewer()
        else:
            with solara.Card(title='Attaching AWS S3',
                             subtitle='Please attach an S3 bucket to save workspace sessions'):
                solara.InputText(label='AWS Access Key ID', value=aws_access_key_id,
                                 on_value=aws_access_key_id.set)
                solara.InputText(label='AWS Secret Access Key', value=aws_secret_access_key,
                                 password=True, on_value=aws_secret_access_key.set)
                solara.InputText(label='AWS Region Name', value=region_name,
                                 on_value=region_name.set)
                solara.InputText(label='Bucket Name', value=bucket_name,
                                 on_value=bucket_name.set)
                solara.Button(label="Connect to S3",
                              on_click=lambda: connect_storage(aws_access_key_id.value,
                                                               aws_secret_access_key.value,
                                                               region_name.value,
                                                               bucket_name.value))
    else:
        StorageViewer()

    if err_message != '':
        solara.Error(err_message)