idx
int64
0
251k
question
stringlengths
53
3.53k
target
stringlengths
5
1.23k
len_question
int64
20
893
len_target
int64
3
238
232,200
def _get_mean_rock(self, mag, _rake, rrup, is_reverse, imt):
    """
    Calculate and return the mean intensity for rock sites.

    Selects the low- or high-magnitude coefficient table depending on the
    near-field saturation magnitude, then evaluates the attenuation
    functional form.
    """
    if mag <= self.NEAR_FIELD_SATURATION_MAG:
        coeffs = self.COEFFS_ROCK_LOWMAG[imt]
    else:
        coeffs = self.COEFFS_ROCK_HIMAG[imt]
    # clip mag at 8.5 to avoid raising a negative number
    # to a fractional power (ValueError)
    mag = min(mag, 8.5)
    near_field_term = numpy.log(rrup + numpy.exp(coeffs['c5'] + coeffs['c6'] * mag))
    mean = (coeffs['c1']
            + coeffs['c2'] * mag
            + coeffs['c3'] * ((8.5 - mag) ** 2.5)
            + coeffs['c4'] * near_field_term
            + coeffs['c7'] * numpy.log(rrup + 2))
    if is_reverse:
        # footnote in table 2 says that for reverse ruptures
        # the mean amplitude value should be multiplied by 1.2
        mean += 0.1823215567939546  # == log(1.2)
    return mean
Calculate and return the mean intensity for rock sites .
273
12
232,201
def _get_stddev_rock(self, mag, imt):
    """Calculate and return total standard deviation for rock sites."""
    coeffs = self.COEFFS_ROCK_STDDERR[imt]
    # sigma saturates above 'maxmag'
    if mag > coeffs['maxmag']:
        return coeffs['maxsigma']
    return coeffs['sigma0'] + coeffs['magfactor'] * mag
Calculate and return total standard deviation for rock sites .
76
12
232,202
def _get_stddev_deep_soil(self, mag, imt):
    """Calculate and return total standard deviation for deep soil sites."""
    # footnote from table 4 says that stderr for magnitudes over 7
    # is equal to the one of magnitude 7
    capped_mag = min(mag, 7)
    coeffs = self.COEFFS_SOIL[imt]
    return coeffs['sigma0'] + coeffs['magfactor'] * capped_mag
Calculate and return total standard deviation for deep soil sites .
86
13
232,203
def zip(what, archive_zip='', risk_file=''):
    """
    Zip into an archive one or two job.ini files with all related files.

    :param what: directory, .xml file (source model logic tree or exposure)
        or job.ini file to zip
    :param archive_zip: name of the archive to create
    :param risk_file: optional risk file associated to a job.ini

    Exits with an error message when `what` is not a recognized input.
    """
    if os.path.isdir(what):
        oqzip.zip_all(what)
    elif what.endswith('.xml'):
        # the original code opened the file twice (once per check) and
        # never closed it; read the first 512 bytes once instead
        with open(what) as f:
            head = f.read(512)
        if '<logicTree' in head:
            # hack to see if the NRML file is of kind logicTree
            oqzip.zip_source_model(what, archive_zip)
        elif '<exposureModel' in head:
            # hack to see if the NRML file is of kind exposureModel
            oqzip.zip_exposure(what, archive_zip)
        else:
            sys.exit('Cannot zip %s' % what)
    elif what.endswith('.ini'):  # a job.ini
        oqzip.zip_job(what, archive_zip, risk_file)
    else:
        sys.exit('Cannot zip %s' % what)
Zip into an archive one or two job . ini files with all related files
210
16
232,204
def reduce(fname, reduction_factor):
    """
    Produce a submodel from `fname` by sampling the nodes randomly.
    Supports source models, site models and exposure models. As a special
    case it is also able to reduce .csv files by sampling the lines.
    This is a debugging utility to reduce large computations to small ones.
    """
    if fname.endswith('.csv'):
        with open(fname) as f:
            first = f.readline()  # read the first line
            if csv.Sniffer().has_header(first):
                header = first
            else:
                header = None
                f.seek(0)  # the first line is data, re-read it
            remaining = f.readlines()
        sampled = general.random_filter(remaining, reduction_factor)
        shutil.copy(fname, fname + '.bak')
        print('Copied the original file in %s.bak' % fname)
        _save_csv(fname, sampled, header)
        print('Extracted %d lines out of %d' % (len(sampled), len(remaining)))
        return
    if fname.endswith('.npy'):
        array = numpy.load(fname)
        shutil.copy(fname, fname + '.bak')
        print('Copied the original file in %s.bak' % fname)
        arr = numpy.array(general.random_filter(array, reduction_factor))
        numpy.save(fname, arr)
        print('Extracted %d rows out of %d' % (len(arr), len(array)))
        return
    node = nrml.read(fname)
    model = node[0]
    tag = model.tag
    if tag.endswith('exposureModel'):
        total = len(model.assets)
        model.assets.nodes = general.random_filter(model.assets, reduction_factor)
        num_nodes = len(model.assets)
    elif tag.endswith('siteModel'):
        total = len(model)
        model.nodes = general.random_filter(model, reduction_factor)
        num_nodes = len(model)
    elif tag.endswith('sourceModel'):
        reduce_source_model(fname, reduction_factor)
        return
    elif tag.endswith('logicTree'):
        for smpath in logictree.collect_info(fname).smpaths:
            reduce_source_model(smpath, reduction_factor)
        return
    else:
        raise RuntimeError('Unknown model tag: %s' % model.tag)
    save_bak(fname, node, num_nodes, total)
Produce a submodel from fname by sampling the nodes randomly . Supports source models , site models and exposure models . As a special case it is also able to reduce .csv files by sampling the lines . This is a debugging utility to reduce large computations to small ones .
542
56
232,205
def downsample_mesh(mesh, tol=1.0):
    """
    Returns a mesh sampled at a lower resolution: columns are kept only at
    the turning points, i.e. where the difference in azimuth exceeds `tol`.
    """
    idx = _find_turning_points(mesh, tol)
    kwargs = dict(lons=mesh.lons[:, idx], lats=mesh.lats[:, idx])
    if mesh.depths is not None:
        kwargs['depths'] = mesh.depths[:, idx]
    return RectangularMesh(**kwargs)
Returns a mesh sampled at a lower resolution - if the difference in azimuth is larger than the specified tolerance a turn is assumed
121
26
232,206
def downsample_trace(mesh, tol=1.0):
    """
    Downsamples the upper edge of a fault within a rectangular mesh,
    retaining node points only where changes in direction of the order of
    `tol` degrees are found.
    """
    idx = _find_turning_points(mesh, tol)
    columns = [mesh.lons[0, idx], mesh.lats[0, idx]]
    if mesh.depths is not None:
        columns.append(mesh.depths[0, idx])
    return numpy.column_stack(columns)
Downsamples the upper edge of a fault within a rectangular mesh retaining node points only if changes in direction on the order of tol are found
116
29
232,207
def get_ry0_distance(self, mesh):
    """
    Compute the minimum distance between each point of `mesh` and the
    great circle arcs perpendicular to the average strike direction of
    the fault trace, passing through the end-points of the trace.
    """
    # ry0 is computed using an average strike direction
    top_edge = self.mesh[0:1]
    arc_azimuth = (self.get_strike() + 90.) % 360
    dst_first = geodetic.distance_to_arc(
        top_edge.lons[0, 0], top_edge.lats[0, 0],
        arc_azimuth, mesh.lons, mesh.lats)
    dst_last = geodetic.distance_to_arc(
        top_edge.lons[0, -1], top_edge.lats[0, -1],
        arc_azimuth, mesh.lons, mesh.lats)
    dst = numpy.zeros_like(dst_first)
    # points between the two arcs (opposite signs) are on the rupture and
    # keep distance zero; otherwise take the shortest of the two distances
    same_side = numpy.sign(dst_first) == numpy.sign(dst_last)
    dst[same_side] = numpy.fmin(numpy.abs(dst_first[same_side]),
                                numpy.abs(dst_last[same_side]))
    return dst
Compute the minimum distance between each point of a mesh and the great circle arcs perpendicular to the average strike direction of the fault trace and passing through the end - points of the trace .
256
37
232,208
def get_rx_distance(self, mesh):
    """
    Compute the (signed) distance between each point of `mesh` and the
    great circle arc defined by the surface top edge.
    """
    top_edge = self.mesh[0:1]
    ncols = top_edge.lons.shape[1]
    dists = []
    if ncols < 3:
        # short top edge: a single arc through the first segment
        p1 = Point(top_edge.lons[0, 0], top_edge.lats[0, 0],
                   top_edge.depths[0, 0])
        p2 = Point(top_edge.lons[0, 1], top_edge.lats[0, 1],
                   top_edge.depths[0, 1])
        azimuth = p1.azimuth(p2)
        dists.append(geodetic.distance_to_arc(
            p1.longitude, p1.latitude, azimuth, mesh.lons, mesh.lats))
    else:
        for i in range(ncols - 1):
            p1 = Point(top_edge.lons[0, i], top_edge.lats[0, i],
                       top_edge.depths[0, i])
            p2 = Point(top_edge.lons[0, i + 1], top_edge.lats[0, i + 1],
                       top_edge.depths[0, i + 1])
            if i == 0:
                # swap the points for the first segment
                p1, p2 = p2, p1
            # compute azimuth and distance
            if i == 0 or i == ncols - 2:
                azimuth = p1.azimuth(p2)
                tmp = geodetic.distance_to_semi_arc(
                    p1.longitude, p1.latitude, azimuth,
                    mesh.lons, mesh.lats)
            else:
                tmp = geodetic.min_distance_to_segment(
                    numpy.array([p1.longitude, p2.longitude]),
                    numpy.array([p1.latitude, p2.latitude]),
                    mesh.lons, mesh.lats)
            if i == 0:
                # correct the sign of the distance for the first segment
                tmp *= -1
            dists.append(tmp)
    # pick, for every mesh point, the distance of smallest magnitude
    dists = numpy.array(dists)
    nearest = abs(dists).argmin(axis=0)
    return dists[nearest, list(range(dists.shape[1]))]
Compute distance between each point of mesh and the surface's great circle arc .
541
15
232,209
def get_top_edge_depth(self):
    """Return the minimum depth of the surface's top edge (0 if on surface)."""
    top_row = self.mesh[0:1]
    return 0 if top_row.depths is None else numpy.min(top_row.depths)
Return minimum depth of the surface's top edge .
49
9
232,210
def get_area(self):
    """Compute the area as the sum of the mesh cell area values."""
    _, _, _, cell_areas = self.mesh.get_cell_dimensions()
    return numpy.sum(cell_areas)
Compute area as the sum of the mesh cells area values .
39
13
232,211
def get_surface_boundaries(self):
    """
    Returns the boundaries in the same format as a multiplanar surface:
    two one-element lists of lons and lats, walking the mesh perimeter
    clockwise (top row, right column, bottom row reversed, left column
    reversed).
    """
    mesh = self.mesh
    perimeter = (
        (mesh.lons[0, :], mesh.lons[1:, -1],
         mesh.lons[-1, :-1][::-1], mesh.lons[:-1, 0][::-1]),
        (mesh.lats[0, :], mesh.lats[1:, -1],
         mesh.lats[-1, :-1][::-1], mesh.lats[:-1, 0][::-1]),
    )
    boundary_lons = numpy.concatenate(perimeter[0])
    boundary_lats = numpy.concatenate(perimeter[1])
    return [boundary_lons], [boundary_lats]
Returns the boundaries in the same format as a multiplanar surface with two one - element lists of lons and lats
168
25
232,212
def get_resampled_top_edge(self, angle_var=0.1):
    """
    Compute a simplified representation of the fault top edge by removing
    the points that do not describe a change of direction, given a
    tolerance angle `angle_var` (in degrees).

    :returns: a :class:`Line` with the retained top-edge points
    """
    mesh = self.mesh
    # mesh.triangulate() is loop-invariant: the original code recomputed
    # it three times per iteration; compute it once up front
    edge_vectors = mesh.triangulate()[1][0]
    top_edge = [Point(mesh.lons[0][0], mesh.lats[0][0], mesh.depths[0][0])]
    for i in range(len(edge_vectors) - 1):
        v1 = numpy.asarray(edge_vectors[i])
        v2 = numpy.asarray(edge_vectors[i + 1])
        # angle between consecutive edge vectors via atan2(|v1 x v2|, v1.v2)
        cosang = numpy.dot(v1, v2)
        sinang = numpy.linalg.norm(numpy.cross(v1, v2))
        angle = math.degrees(numpy.arctan2(sinang, cosang))
        if abs(angle) > angle_var:
            # direction change above tolerance: keep the point
            top_edge.append(Point(mesh.lons[0][i + 1],
                                  mesh.lats[0][i + 1],
                                  mesh.depths[0][i + 1]))
    # the last point is always retained
    top_edge.append(Point(mesh.lons[0][-1], mesh.lats[0][-1],
                          mesh.depths[0][-1]))
    return Line(top_edge)
This methods computes a simplified representation of a fault top edge by removing the points that are not describing a change of direction provided a certain tolerance angle .
324
30
232,213
def get_hypo_location(self, mesh_spacing, hypo_loc=None):
    """
    Determine the location of the hypocentre within the rupture.

    :param mesh_spacing: spacing (km) between mesh nodes
    :param hypo_loc: optional (x, y) fractions along strike and dip;
        if None, the mesh centroid is returned
    """
    mesh = self.mesh
    centroid = mesh.get_middle_point()
    if hypo_loc is None:
        return centroid
    # translate the fractional position into the nearest mesh node
    total_len_y = (len(mesh.depths) - 1) * mesh_spacing
    y_node = int(numpy.round(hypo_loc[1] * total_len_y / mesh_spacing))
    total_len_x = (len(mesh.lons[y_node]) - 1) * mesh_spacing
    x_node = int(numpy.round(hypo_loc[0] * total_len_x / mesh_spacing))
    return Point(mesh.lons[y_node][x_node],
                 mesh.lats[y_node][x_node],
                 mesh.depths[y_node][x_node])
The method determines the location of the hypocentre within the rupture
229
13
232,214
def viewlog(calc_id, host='localhost', port=8000):
    """
    Extract the log of the given calculation ID from the WebUI and print
    it, polling one page (10 rows) per second until interrupted.
    """
    base_url = 'http://%s:%s/v1/calc/' % (host, port)
    start = 0
    psize = 10  # page size
    try:
        while True:
            url = base_url + '%d/log/%d:%d' % (calc_id, start, start + psize)
            rows = json.load(urlopen(url))
            for row in rows:
                print(' '.join(row))
            start += len(rows)
            time.sleep(1)
    except KeyboardInterrupt:
        pass  # the user stopped the polling: exit quietly
    except Exception as exc:
        # the original bare `except: pass` silently hid connection and
        # decoding errors; report them instead of swallowing them
        print(exc)
Extract the log of the given calculation ID from the WebUI
133
13
232,215
def pickle_sequence(objects):
    """
    Convert an iterable of objects into a list of pickled objects.
    If the iterable contains copies, the pickling will be done only once.
    If the iterable contains objects already pickled, they will not be
    pickled again.
    """
    cache = {}
    pickled = []
    for obj in objects:
        key = id(obj)
        if key not in cache:
            # reuse objects already pickled; pickle the others once
            cache[key] = obj if isinstance(obj, Pickled) else Pickled(obj)
        pickled.append(cache[key])
    return pickled
Convert an iterable of objects into a list of pickled objects . If the iterable contains copies the pickling will be done only once . If the iterable contains objects already pickled they will not be pickled again .
92
47
232,216
def check_mem_usage(soft_percent=None, hard_percent=None):
    """
    Display a warning if we are running out of memory; raise MemoryError
    above the hard limit.
    """
    soft = soft_percent or config.memory.soft_mem_limit
    hard = hard_percent or config.memory.hard_mem_limit
    used = psutil.virtual_memory().percent
    if used > hard:
        raise MemoryError('Using more memory than allowed by configuration '
                          '(Used: %d%% / Allowed: %d%%)! Shutting down.'
                          % (used, hard))
    elif used > soft:
        return 'Using over %d%% of the memory in %s!' % (
            used, socket.gethostname())
Display a warning if we are running out of memory
171
10
232,217
def init_workers():
    """Initialization function for the worker processes of the pool."""
    setproctitle('oq-worker')
    # unregister raiseMasterKilled in oq-workers to avoid deadlock,
    # since processes are terminated via pool.terminate()
    signal.signal(signal.SIGTERM, signal.SIG_DFL)
    # prctl is still useful (on Linux) to terminate all spawned processes
    # when master is killed via SIGKILL
    try:
        import prctl
    except ImportError:
        return
    # if the parent dies, the children die
    prctl.set_pdeathsig(signal.SIGKILL)
Waiting function used to wake up the process pool
124
10
232,218
def get(self):
    """Return the underlying value or raise the underlying exception."""
    val = self.pik.unpickle()
    if not self.tb_str:
        return val
    # a traceback string means the pickled object is an exception
    etype = val.__class__
    msg = '\n%s%s: %s' % (self.tb_str, etype.__name__, val)
    if issubclass(etype, KeyError):
        raise RuntimeError(msg)  # nicer message
    raise etype(msg)
Returns the underlying value or raise the underlying exception
95
9
232,219
def sum(cls, iresults):
    """Sum the data transfer information of a set of results."""
    res = object.__new__(cls)
    res.received = []
    res.sent = 0
    for iresult in iresults:
        res.received.extend(iresult.received)
        res.sent += iresult.sent
        # all results must share the same base name (before the '#')
        base_name = iresult.name.split('#', 1)[0]
        if hasattr(res, 'name'):
            assert res.name.split('#', 1)[0] == base_name, (
                res.name, base_name)
        else:
            res.name = iresult.name.split('#')[0]
    return res
Sum the data transfer information of a set of results
135
10
232,220
def log_percent(self):
    """Log the progress of the computation in percentage."""
    done = self.total - self.todo
    percent = int(float(done) / self.total * 100)
    if not hasattr(self, 'prev_percent'):  # first time
        self.prev_percent = 0
        self.progress('Sent %s of data in %d %s task(s)',
                      humansize(self.sent.sum()), self.total, self.name)
    elif percent > self.prev_percent:
        self.progress('%s %3d%% [of %d tasks]',
                      self.name, percent, len(self.tasks))
        self.prev_percent = percent
    return done
Log the progress of the computation in percentage
147
8
232,221
def submit(self, *args, func=None, monitor=None):
    """Submit the given arguments to the underlying task."""
    monitor = monitor or self.monitor
    func = func or self.task_func
    if not hasattr(self, 'socket'):  # first time
        self.__class__.running_tasks = self.tasks
        self.socket = Socket(self.receiver, zmq.PULL, 'bind').__enter__()
        monitor.backurl = 'tcp://%s:%s' % (
            config.dbserver.host, self.socket.port)
    assert not isinstance(args[-1], Monitor)  # sanity check
    dist = 'no' if self.num_tasks == 1 else self.distribute
    if dist != 'no':
        # account for the size of the pickled payload
        args = pickle_sequence(args)
        sizes = [len(p) for p in args]
        self.sent += numpy.array(sizes)
    res = submit[dist](self, func, args, monitor)
    self.tasks.append(res)
Submit the given arguments to the underlying task
213
8
232,222
def reduce(self, agg=operator.add, acc=None):
    """Submit all tasks and reduce the results."""
    smap = self.submit_all()
    return smap.reduce(agg, acc)
Submit all tasks and reduce the results
31
7
232,223
def convert(self, imtls, idx=0):
    """Convert a probability curve into a record of dtype `imtls.dt`."""
    rec = numpy.zeros(1, imtls.dt)
    for imt in imtls:
        # pick the slice of levels belonging to this IMT
        rec[imt] = self.array[imtls(imt), idx]
    return rec[0]
Convert a probability curve into a record of dtype imtls . dt .
66
18
232,224
def nbytes(self):
    """The size in bytes of the underlying array (0 if all maps are empty)."""
    try:
        n_sites, n_levels, n_inner = get_shape([self])
    except AllEmptyProbabilityMaps:
        return 0
    return BYTES_PER_FLOAT * n_sites * n_levels * n_inner
The size of the underlying array
49
6
232,225
def convert(self, imtls, nsites, idx=0):
    """
    Convert a probability map into a composite array of length `nsites`
    and dtype `imtls.dt`; sites missing from the map stay zero.
    """
    curves = numpy.zeros(nsites, imtls.dt)
    for imt in curves.dtype.names:
        per_imt = curves[imt]
        for sid in self:
            per_imt[sid] = self[sid].array[imtls(imt), idx]
    return curves
Convert a probability map into a composite array of length nsites and dtype imtls . dt .
94
23
232,226
def filter(self, sids):
    """Extract a submap of self for the given sids."""
    submap = self.__class__(self.shape_y, self.shape_z)
    for sid in sids:
        try:
            submap[sid] = self[sid]
        except KeyError:
            # sites not present in the map are simply skipped
            pass
    return submap
Extracts a submap of self for the given sids .
56
14
232,227
def extract(self, inner_idx):
    """
    Extract the component of the underlying ProbabilityCurves specified
    by the index `inner_idx`, as a new map with inner dimension 1.
    """
    submap = self.__class__(self.shape_y, 1)
    for sid in self:
        column = self[sid].array[:, inner_idx].reshape(-1, 1)
        submap[sid] = ProbabilityCurve(column)
    return submap
Extracts a component of the underlying ProbabilityCurves specified by the index inner_idx .
74
21
232,228
def compare(what, imt, calc_ids, files, samplesites=100, rtol=.1, atol=1E-4):
    """Compare the hazard curves or maps of two or more calculations."""
    sids, imtls, poes, arrays = getdata(what, calc_ids, samplesites)
    try:
        levels = imtls[imt]
    except KeyError:
        sys.exit('%s not found. The available IMTs are %s' %
                 (imt, list(imtls)))
    imt2idx = {imt: i for i, imt in enumerate(imtls)}
    head = ['site_id'] if files else ['site_id', 'calc_id']
    # select the slice/column of the arrays belonging to the given IMT
    if what == 'hcurves':
        array_imt = arrays[:, :, imtls(imt)]
        header = head + ['%.5f' % lvl for lvl in levels]
    else:  # hmaps
        array_imt = arrays[:, :, imt2idx[imt]]
        header = head + [str(poe) for poe in poes]
    rows = collections.defaultdict(list)
    diff_idxs = get_diff_idxs(array_imt, rtol, atol)
    if len(diff_idxs) == 0:
        print('There are no differences within the tolerance of %d%%' %
              (rtol * 100))
        return
    arr = array_imt.transpose(1, 0, 2)  # shape (N, C, L)
    for sid, array in sorted(zip(sids[diff_idxs], arr[diff_idxs])):
        for calc_id, cols in zip(calc_ids, array):
            if files:
                rows[calc_id].append([sid] + list(cols))
            else:
                rows['all'].append([sid, calc_id] + list(cols))
    if files:
        # one output file per calculation
        fdict = {calc_id: open('%s.txt' % calc_id, 'w')
                 for calc_id in calc_ids}
        for calc_id, f in fdict.items():
            f.write(views.rst_table(rows[calc_id], header))
            print('Generated %s' % f.name)
    else:
        print(views.rst_table(rows['all'], header))
Compare the hazard curves or maps of two or more calculations
536
11
232,229
def build_filename(filename, filetype='png', resolution=300):
    """
    Use the input properties to build the filename string.

    An extension already present in `filename` overrides `filetype`;
    falsy `filetype`/`resolution` fall back to 'png' and 300.
    """
    base, ext = os.path.splitext(filename)
    if ext:
        filetype = ext[1:]
    if not filetype:
        filetype = 'png'
    filename = base + '.' + filetype
    if not resolution:
        resolution = 300
    return filename, filetype, resolution
Uses the input properties to create the string of the filename
89
12
232,230
def _get_catalogue_bin_limits(catalogue, dmag):
    """Return the magnitude bins corresponding to the catalogue."""
    mags = catalogue.data['magnitude']
    mag_bins = np.arange(float(np.floor(np.min(mags))) - dmag,
                         float(np.ceil(np.max(mags))) + dmag,
                         dmag)
    counts = np.histogram(mags, mag_bins)[0]
    # trim the bins down to the populated range
    nonzero = np.where(counts > 0)[0]
    return mag_bins[nonzero[0]:(nonzero[-1] + 2)]
Returns the magnitude bins corresponding to the catalogue
157
10
232,231
def plot_depth_histogram(catalogue, bin_width, normalisation=False,
                         bootstrap=None, filename=None, figure_size=(8, 6),
                         filetype='png', dpi=300, ax=None):
    """Create a histogram of the depths in the catalogue."""
    if ax is None:
        fig, ax = plt.subplots(figsize=figure_size)
    else:
        fig = ax.get_figure()
    # create the depth range
    if len(catalogue.data['depth']) == 0:  # pylint: disable=len-as-condition
        raise ValueError('No depths reported in catalogue!')
    depth_bins = np.arange(0.,
                           np.max(catalogue.data['depth']) + bin_width,
                           bin_width)
    depth_hist = catalogue.get_depth_distribution(depth_bins,
                                                  normalisation,
                                                  bootstrap)
    ax.bar(depth_bins[:-1], depth_hist,
           width=0.95 * bin_width, edgecolor='k')
    ax.set_xlabel('Depth (km)')
    ylabel = 'Probability Mass Function' if normalisation else 'Count'
    ax.set_ylabel(ylabel)
    ax.set_title('Depth Histogram')
    _save_image(fig, filename, filetype, dpi)
Creates a histogram of the depths in the catalogue
298
11
232,232
def plot_magnitude_depth_density(catalogue, mag_int, depth_int,
                                 logscale=False, normalisation=False,
                                 bootstrap=None, filename=None,
                                 figure_size=(8, 6), filetype='png',
                                 dpi=300, ax=None):
    """Create a density plot of the magnitude and depth distribution."""
    if len(catalogue.data['depth']) == 0:  # pylint: disable=len-as-condition
        raise ValueError('No depths reported in catalogue!')
    depth_bins = np.arange(0.,
                           np.max(catalogue.data['depth']) + depth_int,
                           depth_int)
    mag_bins = _get_catalogue_bin_limits(catalogue, mag_int)
    mag_depth_dist = catalogue.get_magnitude_depth_distribution(
        mag_bins, depth_bins, normalisation, bootstrap)
    # smallest non-zero value, needed for the log colour scale
    vmin_val = np.min(mag_depth_dist[mag_depth_dist > 0.])
    if ax is None:
        fig, ax = plt.subplots(figsize=figure_size)
    else:
        fig = ax.get_figure()
    if logscale:
        normaliser = LogNorm(vmin=vmin_val, vmax=np.max(mag_depth_dist))
    else:
        normaliser = Normalize(vmin=0, vmax=np.max(mag_depth_dist))
    im = ax.pcolor(mag_bins[:-1], depth_bins[:-1],
                   mag_depth_dist.T, norm=normaliser)
    ax.set_xlabel('Magnitude')
    ax.set_ylabel('Depth (km)')
    ax.set_xlim(mag_bins[0], mag_bins[-1])
    ax.set_ylim(depth_bins[0], depth_bins[-1])
    fig.colorbar(im, ax=ax)
    if normalisation:
        ax.set_title('Magnitude-Depth Density')
    else:
        ax.set_title('Magnitude-Depth Count')
    _save_image(fig, filename, filetype, dpi)
Creates a density plot of the magnitude and depth distribution
489
11
232,233
def plot_magnitude_time_scatter(catalogue, plot_error=False, fmt_string='o',
                                filename=None, figure_size=(8, 6),
                                filetype='png', dpi=300, ax=None):
    """Create a simple scatter plot of magnitude with time."""
    if ax is None:
        fig, ax = plt.subplots(figsize=figure_size)
    else:
        fig = ax.get_figure()
    dtime = catalogue.get_decimal_time()
    # pylint: disable=len-as-condition
    if len(catalogue.data['sigmaMagnitude']) == 0:
        print('Magnitude Error is missing - neglecting error bars!')
        plot_error = False
    if plot_error:
        ax.errorbar(dtime, catalogue.data['magnitude'], xerr=None,
                    yerr=catalogue.data['sigmaMagnitude'], fmt=fmt_string)
    else:
        ax.plot(dtime, catalogue.data['magnitude'], fmt_string)
    ax.set_xlabel('Year')
    ax.set_ylabel('Magnitude')
    ax.set_title('Magnitude-Time Plot')
    _save_image(fig, filename, filetype, dpi)
Creates a simple scatter plot of magnitude with time
282
10
232,234
def plot_magnitude_time_density(catalogue, mag_int, time_int,
                                completeness=None, normalisation=False,
                                logscale=True, bootstrap=None, xlim=None,
                                ylim=None, filename=None, figure_size=(8, 6),
                                filetype='png', dpi=300, ax=None):
    """
    Create a plot of magnitude-time density.

    :param xlim: optional [xmin, xmax] axis limits (defaults to time bins)
    :param ylim: optional [ymin, ymax] axis limits (defaults to mag bins)
    """
    # the original signature used mutable default arguments (xlim=[],
    # ylim=[]); use None sentinels instead — behaviour is unchanged
    xlim = [] if xlim is None else xlim
    ylim = [] if ylim is None else ylim
    if ax is None:
        fig, ax = plt.subplots(figsize=figure_size)
    else:
        fig = ax.get_figure()
    # create the magnitude bins
    if isinstance(mag_int, (np.ndarray, list)):
        mag_bins = mag_int
    else:
        mag_bins = np.arange(np.min(catalogue.data['magnitude']),
                             np.max(catalogue.data['magnitude']) + mag_int / 2.,
                             mag_int)
    # create the time bins
    if isinstance(time_int, (np.ndarray, list)):
        time_bins = time_int
    else:
        time_bins = np.arange(float(np.min(catalogue.data['year'])),
                              float(np.max(catalogue.data['year'])) + 1.,
                              float(time_int))
    # get magnitude-time distribution
    mag_time_dist = catalogue.get_magnitude_time_distribution(
        mag_bins, time_bins, normalisation, bootstrap)
    # get smallest non-zero value for the colour scale
    vmin_val = np.min(mag_time_dist[mag_time_dist > 0.])
    # create plot
    if logscale:
        norm_data = LogNorm(vmin=vmin_val, vmax=np.max(mag_time_dist))
    else:
        if normalisation:
            norm_data = Normalize(vmin=vmin_val, vmax=np.max(mag_time_dist))
        else:
            norm_data = Normalize(vmin=1.0, vmax=np.max(mag_time_dist))
    im = ax.pcolor(time_bins[:-1], mag_bins[:-1],
                   mag_time_dist.T, norm=norm_data)
    ax.set_xlabel('Time (year)')
    ax.set_ylabel('Magnitude')
    if len(xlim) == 2:
        ax.set_xlim(xlim[0], xlim[1])
    else:
        ax.set_xlim(time_bins[0], time_bins[-1])
    if len(ylim) == 2:
        ax.set_ylim(ylim[0], ylim[1])
    else:
        ax.set_ylim(mag_bins[0],
                    mag_bins[-1] + (mag_bins[-1] - mag_bins[-2]))
    # fix the title
    if normalisation:
        fig.colorbar(im, label='Event Density', shrink=0.9, ax=ax)
    else:
        fig.colorbar(im, label='Event Count', shrink=0.9, ax=ax)
    ax.grid(True)
    # plot completeness
    if completeness is not None:
        _plot_completeness(ax, completeness, time_bins[0], time_bins[-1])
    _save_image(fig, filename, filetype, dpi)
Creates a plot of magnitude - time density
782
9
232,235
def _plot_completeness(ax, comw, start_time, end_time):
    """Add completeness intervals (as a step line) to a plot."""
    comw = np.array(comw)
    # pad the completeness windows so the step spans the whole time range
    times = np.hstack([end_time, comw[:, 0], start_time])
    mags = np.hstack([comw[0, 1], comw[:, 1], comw[-1, 1]])
    comp = np.column_stack([times, mags])
    ax.step(comp[:-1, 0], comp[1:, 1], linestyle='-',
            where="post", linewidth=3, color='brown')
Adds completeness intervals to a plot
144
7
232,236
def get_completeness_adjusted_table(catalogue, completeness, dmag,
                                    offset=1.0E-5, end_year=None, plot=False,
                                    figure_size=(8, 6), filename=None,
                                    filetype='png', dpi=300, ax=None):
    """
    Count the number of earthquakes in each magnitude bin and normalise
    the rate to annual rates, taking into account the completeness.
    """
    if not end_year:
        end_year = catalogue.end_year
    # find the natural bin limits
    mag_bins = _get_catalogue_bin_limits(catalogue, dmag)
    obs_time = end_year - completeness[:, 0] + 1.
    obs_rates = np.zeros_like(mag_bins)
    durations = np.zeros_like(mag_bins)
    n_comp = np.shape(completeness)[0]
    for iloc in range(n_comp):
        low_mag = completeness[iloc, 1]
        comp_year = completeness[iloc, 0]
        if iloc == (n_comp - 1):
            # last (open-ended) completeness window
            idx = np.logical_and(
                catalogue.data['magnitude'] >= low_mag - offset,
                catalogue.data['year'] >= comp_year)
            high_mag = mag_bins[-1]
            obs_idx = mag_bins >= (low_mag - offset)
        else:
            high_mag = completeness[iloc + 1, 1]
            mag_idx = np.logical_and(
                catalogue.data['magnitude'] >= low_mag - offset,
                catalogue.data['magnitude'] < (high_mag - offset))
            idx = np.logical_and(
                mag_idx,
                catalogue.data['year'] >= (comp_year - offset))
            obs_idx = np.logical_and(mag_bins >= (low_mag - offset),
                                     mag_bins < (high_mag + offset))
        temp_rates = np.histogram(catalogue.data['magnitude'][idx],
                                  mag_bins[obs_idx])[0]
        temp_rates = temp_rates.astype(float) / obs_time[iloc]
        obs_rates[obs_idx[:-1]] = temp_rates
        durations[obs_idx[:-1]] = obs_time[iloc]
    # keep only the bins with a positive rate
    selector = np.where(obs_rates > 0.)[0]
    mag_bins = mag_bins[selector]
    obs_rates = obs_rates[selector]
    durations = durations[selector]
    # get cumulative rates
    cum_rates = np.array([sum(obs_rates[iloc:])
                          for iloc in range(0, len(obs_rates))])
    if plot:
        plt.figure(figsize=figure_size)
        plt.semilogy(mag_bins + dmag / 2., obs_rates, "bo",
                     label="Incremental")
        plt.semilogy(mag_bins + dmag / 2., cum_rates, "rs",
                     label="Cumulative")
        plt.xlabel("Magnitude (M)", fontsize=16)
        plt.ylabel("Annual Rate", fontsize=16)
        plt.grid(True)
        plt.legend(fontsize=16)
        if filename:
            plt.savefig(filename, format=filetype, dpi=dpi,
                        bbox_inches="tight")
    return np.column_stack([mag_bins, durations, obs_rates, cum_rates,
                            np.log10(cum_rates)])
Counts the number of earthquakes in each magnitude bin and normalises the rate to annual rates taking into account the completeness
802
24
232,237
def plot_observed_recurrence(catalogue, completeness, dmag, end_year=None,
                             filename=None, figure_size=(8, 6),
                             filetype='png', dpi=300, ax=None):
    """Plot the observed recurrence, taking into account the completeness."""
    # get completeness adjusted recurrence table
    if isinstance(completeness, float):
        # unique completeness magnitude for the whole catalogue
        completeness = np.array(
            [[np.min(catalogue.data['year']), completeness]])
    if not end_year:
        end_year = catalogue.update_end_year()
    catalogue.data["dtime"] = catalogue.get_decimal_time()
    cent_mag, t_per, n_obs = get_completeness_counts(catalogue,
                                                     completeness, dmag)
    obs_rates = n_obs / t_per
    cum_obs_rates = np.array([np.sum(obs_rates[i:])
                              for i in range(len(obs_rates))])
    if ax is None:
        fig, ax = plt.subplots(figsize=figure_size)
    else:
        fig = ax.get_figure()
    ax.semilogy(cent_mag, obs_rates, 'bo', label="Incremental")
    ax.semilogy(cent_mag, cum_obs_rates, 'rs', label="Cumulative")
    ax.set_xlim([cent_mag[0] - 0.1, cent_mag[-1] + 0.1])
    ax.set_xlabel('Magnitude')
    ax.set_ylabel('Annual Rate')
    ax.legend()
    _save_image(fig, filename, filetype, dpi)
Plots the observed recurrence taking into account the completeness
382
12
232,238
def get_number_observations(self):
    """Return the number of observations in the data file (0 if absent)."""
    data = self.data
    if isinstance(data, dict) and 'exx' in data:
        return len(data['exx'])
    return 0
Returns the number of observations in the data file
55
9
232,239
def plot_lc(calc_id, aid=None):
    """Plot loss curves given a calculation id and an asset ordinal."""
    # read the hazard data
    dstore = util.read(calc_id)
    dset = dstore['agg_curves-rlzs']
    if aid is not None:
        sys.exit('Not implemented yet')
    # plot the global curves
    plt = make_figure(dset.attrs['return_periods'], dset.value)
    plt.show()
Plot loss curves given a calculation id and an asset ordinal .
101
13
232,240
def get_weighted_poes(gsim, sctx, rctx, dctx, imt, imls, truncation_level,
                      weighting=DEFAULT_WEIGHTING):
    """
    Implement the NGA West 2 GMPE epistemic uncertainty adjustment factor
    without re-calculating the actual GMPE each time.
    """
    if truncation_level is not None and truncation_level < 0:
        raise ValueError('truncation level must be zero, positive number '
                         'or None')
    gsim._check_imt(imt)
    adjustment = nga_west2_epistemic_adjustment(rctx.mag, dctx.rrup)
    adjustment = adjustment.reshape(adjustment.shape + (1, ))
    if truncation_level == 0:
        # zero truncation mode, just compare imls to mean
        imls = gsim.to_distribution_values(imls)
        mean, _ = gsim.get_mean_and_stddevs(sctx, rctx, dctx, imt, [])
        mean = mean.reshape(mean.shape + (1, ))
        output = np.zeros([mean.shape[0], imls.shape[0]])
        for wgt, fct in weighting:
            exceeds = (imls <= (mean + (fct * adjustment))).astype(float)
            output += wgt * exceeds
        return output
    # use real normal distribution
    assert (const.StdDev.TOTAL
            in gsim.DEFINED_FOR_STANDARD_DEVIATION_TYPES)
    imls = gsim.to_distribution_values(imls)
    mean, [stddev] = gsim.get_mean_and_stddevs(
        sctx, rctx, dctx, imt, [const.StdDev.TOTAL])
    mean = mean.reshape(mean.shape + (1, ))
    stddev = stddev.reshape(stddev.shape + (1, ))
    output = np.zeros([mean.shape[0], imls.shape[0]])
    for wgt, fct in weighting:
        values = (imls - (mean + (fct * adjustment))) / stddev
        if truncation_level is None:
            output += wgt * _norm_sf(values)
        else:
            output += wgt * _truncnorm_sf(truncation_level, values)
    return output
This function implements the NGA West 2 GMPE epistemic uncertainty adjustment factor without re - calculating the actual GMPE each time .
525
26
232,241
def register_fields(w):
    """Register shapefile fields."""
    # parameter lists with a leading key element
    for params in (BASE_PARAMS, GEOMETRY_PARAMS, MFD_PARAMS):
        for _, param, dtype in params:
            w.field(param, fieldType=dtype, size=FIELD_SIZE)
    # parameter lists made of (name, dtype) pairs
    for params in (RATE_PARAMS, STRIKE_PARAMS, DIP_PARAMS, RAKE_PARAMS,
                   NPW_PARAMS, HDEPTH_PARAMS, HDW_PARAMS,
                   PLANES_STRIKES_PARAM, PLANES_DIPS_PARAM):
        for param, dtype in params:
            w.field(param, fieldType=dtype, size=FIELD_SIZE)
    # source typology
    w.field('sourcetype', 'C')
Register shapefile fields .
199
5
232,242
# Extract BASE_PARAMS values from a source node, looking first in the XML
# attributes and then in child tags; vtype "c" keeps the raw string, "f"
# converts to float, anything else maps to None.  Returns a param->value dict.
def extract_source_params ( src ) : tags = get_taglist ( src ) data = [ ] for key , param , vtype in BASE_PARAMS : if key in src . attrib : if vtype == "c" : data . append ( ( param , src . attrib [ key ] ) ) elif vtype == "f" : data . append ( ( param , float ( src . attrib [ key ] ) ) ) else : data . append ( ( param , None ) ) elif key in tags : if vtype == "c" : data . append ( ( param , src . nodes [ tags . index ( key ) ] . text ) ) elif vtype == "f" : data . append ( ( param , float ( src . nodes [ tags . index ( key ) ] . text ) ) ) else : data . append ( ( param , None ) ) else : data . append ( ( param , None ) ) return dict ( data )
Extract params from source object .
207
7
232,243
# Parse a complexFaultGeometry node into a dict with faultTopEdge /
# faultBottomEdge / intermediateEdges as Nx3 (lon, lat, depth) arrays;
# upper/lowerSeismoDepth are taken as min/max of the edge depths.
# "dip" is always set to None for complex geometries.
def parse_complex_fault_geometry ( node ) : assert "complexFaultGeometry" in node . tag # Get general attributes geometry = { "intermediateEdges" : [ ] } for subnode in node : crds = subnode . nodes [ 0 ] . nodes [ 0 ] . text if "faultTopEdge" in subnode . tag : geometry [ "faultTopEdge" ] = numpy . array ( [ [ crds [ i ] , crds [ i + 1 ] , crds [ i + 2 ] ] for i in range ( 0 , len ( crds ) , 3 ) ] ) geometry [ "upperSeismoDepth" ] = numpy . min ( geometry [ "faultTopEdge" ] [ : , 2 ] ) elif "faultBottomEdge" in subnode . tag : geometry [ "faultBottomEdge" ] = numpy . array ( [ [ crds [ i ] , crds [ i + 1 ] , crds [ i + 2 ] ] for i in range ( 0 , len ( crds ) , 3 ) ] ) geometry [ "lowerSeismoDepth" ] = numpy . max ( geometry [ "faultBottomEdge" ] [ : , 2 ] ) elif "intermediateEdge" in subnode . tag : geometry [ "intermediateEdges" ] . append ( numpy . array ( [ [ crds [ i ] , crds [ i + 1 ] , crds [ i + 2 ] ] for i in range ( 0 , len ( crds ) , 3 ) ] ) ) else : pass geometry [ "dip" ] = None return geometry
Parses a complex fault geometry node returning both the attributes and parameters in a dictionary
351
17
232,244
# Parse a planarSurface node into a dict with strike, dip, the four corner
# points (topLeft, topRight, bottomRight, bottomLeft) as a 4x3 array, and
# upper/lowerSeismoDepth tracked as the min/max corner depth.
def parse_planar_fault_geometry ( node ) : assert "planarSurface" in node . tag geometry = { "strike" : node . attrib [ "strike" ] , "dip" : node . attrib [ "dip" ] } upper_depth = numpy . inf lower_depth = 0.0 tags = get_taglist ( node ) corner_points = [ ] for locn in [ "topLeft" , "topRight" , "bottomRight" , "bottomLeft" ] : plane = node . nodes [ tags . index ( locn ) ] upper_depth = plane [ "depth" ] if plane [ "depth" ] < upper_depth else upper_depth lower_depth = plane [ "depth" ] if plane [ "depth" ] > lower_depth else lower_depth corner_points . append ( [ plane [ "lon" ] , plane [ "lat" ] , plane [ "depth" ] ] ) geometry [ "upperSeismoDepth" ] = upper_depth geometry [ "lowerSeismoDepth" ] = lower_depth geometry [ "corners" ] = numpy . array ( corner_points ) return geometry
Parses a planar fault geometry node returning both the attributes and parameters in a dictionary
253
18
232,245
def extract_mfd_params(src):
    """
    Extract the magnitude-frequency distribution parameters from a source
    node.

    :param src: source node with ``nodes``/``attrib``/``tag`` members
    :returns: a pair ``(params, rates)`` of dictionaries - the MFD_PARAMS
        attribute values and the per-bin occurrence rates (padded with
        ``None`` up to the RATE_PARAMS length)
    :raises ValueError: if no supported MFD type is found or there are
        more rates than the shapefile can hold
    """
    tags = get_taglist(src)
    if "incrementalMFD" in tags:
        mfd_node = src.nodes[tags.index("incrementalMFD")]
    elif "truncGutenbergRichterMFD" in tags:
        mfd_node = src.nodes[tags.index("truncGutenbergRichterMFD")]
    elif "arbitraryMFD" in tags:
        mfd_node = src.nodes[tags.index("arbitraryMFD")]
    elif "YoungsCoppersmithMFD" in tags:
        mfd_node = src.nodes[tags.index("YoungsCoppersmithMFD")]
    else:
        raise ValueError("Source %s contains no supported MFD type!"
                         % src.tag)
    data = []
    rates = []
    for key, param, vtype in MFD_PARAMS:
        if key in mfd_node.attrib and mfd_node.attrib[key] is not None:
            data.append((param, mfd_node.attrib[key]))
        else:
            data.append((param, None))
    # BUG FIX: the original condition was
    #   ("incrementalMFD" or "arbitraryMFD") in mfd_node.tag
    # which evaluates to "incrementalMFD" in mfd_node.tag, so arbitraryMFD
    # sources never had their rates extracted.  Test each tag explicitly.
    if "incrementalMFD" in mfd_node.tag or "arbitraryMFD" in mfd_node.tag:
        # Extract rates
        rates = ~mfd_node.occurRates
        n_r = len(rates)
        if n_r > MAX_RATES:
            raise ValueError("Number of rates in source %s too large "
                             "to be placed into shapefile" % src.tag)
        rate_dict = dict([(key, rates[i] if i < n_r else None)
                          for i, (key, _) in enumerate(RATE_PARAMS)])
    elif "YoungsCoppersmithMFD" in mfd_node.tag:
        rate_dict = dict([(key, mfd_node.attrib['characteristicRate'])
                          for i, (key, _) in enumerate(RATE_PARAMS)])
    else:
        rate_dict = dict([(key, None)
                          for i, (key, _) in enumerate(RATE_PARAMS)])
    return dict(data), rate_dict
Extracts the MFD parameters from an object
498
10
232,246
# Extract hypocentral depths and their probabilities from point/area sources.
# Non point/area sources (and sources with an empty hypoDepthDist) return
# all-None dicts keyed by HDEPTH_PARAMS / HDW_PARAMS; raises ValueError when
# the distribution exceeds MAX_HYPO_DEPTHS.
def extract_source_hypocentral_depths ( src ) : if "pointSource" not in src . tag and "areaSource" not in src . tag : hds = dict ( [ ( key , None ) for key , _ in HDEPTH_PARAMS ] ) hdsw = dict ( [ ( key , None ) for key , _ in HDW_PARAMS ] ) return hds , hdsw tags = get_taglist ( src ) hdd_nodeset = src . nodes [ tags . index ( "hypoDepthDist" ) ] if len ( hdd_nodeset ) > MAX_HYPO_DEPTHS : raise ValueError ( "Number of hypocentral depths %s exceeds stated " "maximum of %s" % ( str ( len ( hdd_nodeset ) ) , str ( MAX_HYPO_DEPTHS ) ) ) if len ( hdd_nodeset ) : hds = [ ] hdws = [ ] for hdd_node in hdd_nodeset : hds . append ( float ( hdd_node . attrib [ "depth" ] ) ) hdws . append ( float ( hdd_node . attrib [ "probability" ] ) ) hds = expand_src_param ( hds , HDEPTH_PARAMS ) hdsw = expand_src_param ( hdws , HDW_PARAMS ) else : hds = dict ( [ ( key , None ) for key , _ in HDEPTH_PARAMS ] ) hdsw = dict ( [ ( key , None ) for key , _ in HDW_PARAMS ] ) return hds , hdsw
Extract source hypocentral depths .
369
8
232,247
def extract_source_planes_strikes_dips(src):
    """
    Extract strike and dip angles for a characteristicFaultSource defined
    by multiple planar surfaces.

    :param src: source node
    :returns: a pair of dicts (strikes, dips) keyed by
        PLANES_STRIKES_PARAM / PLANES_DIPS_PARAM, all-None when the source
        is not a characteristicFaultSource or has no planar surfaces
    :raises ValueError: if more than MAX_PLANES planes are found
    """
    if "characteristicFaultSource" not in src.tag:
        strikes = dict([(key, None) for key, _ in PLANES_STRIKES_PARAM])
        dips = dict([(key, None) for key, _ in PLANES_DIPS_PARAM])
        return strikes, dips
    tags = get_taglist(src)
    surface_set = src.nodes[tags.index("surface")]
    strikes = []
    dips = []
    num_planes = 0
    for surface in surface_set:
        if "planarSurface" in surface.tag:
            strikes.append(float(surface.attrib["strike"]))
            dips.append(float(surface.attrib["dip"]))
            num_planes += 1
    if num_planes > MAX_PLANES:
        # fixed typos in the original message ("sourcs", "exceededs")
        raise ValueError("Number of planes in source %s exceeds maximum "
                         "of %s" % (str(num_planes), str(MAX_PLANES)))
    if num_planes:
        strikes = expand_src_param(strikes, PLANES_STRIKES_PARAM)
        dips = expand_src_param(dips, PLANES_DIPS_PARAM)
    else:
        strikes = dict([(key, None) for key, _ in PLANES_STRIKES_PARAM])
        dips = dict([(key, None) for key, _ in PLANES_DIPS_PARAM])
    return strikes, dips
Extract strike and dip angles for source defined by multiple planes .
339
13
232,248
# Collect all parameter dictionaries for a source (base, geometry, MFD,
# rates, nodal planes, hypocentral depths, plane strikes/dips) and write
# them, plus the stripped source tag, as one shapefile record on writer `w`.
def set_params ( w , src ) : params = extract_source_params ( src ) # this is done because for characteristic sources geometry is in # 'surface' attribute params . update ( extract_geometry_params ( src ) ) mfd_pars , rate_pars = extract_mfd_params ( src ) params . update ( mfd_pars ) params . update ( rate_pars ) strikes , dips , rakes , np_weights = extract_source_nodal_planes ( src ) params . update ( strikes ) params . update ( dips ) params . update ( rakes ) params . update ( np_weights ) hds , hdsw = extract_source_hypocentral_depths ( src ) params . update ( hds ) params . update ( hdsw ) pstrikes , pdips = extract_source_planes_strikes_dips ( src ) params . update ( pstrikes ) params . update ( pdips ) params [ 'sourcetype' ] = striptag ( src . tag ) w . record ( * * params )
Set source parameters .
239
4
232,249
# Write the area source polygon (from its areaGeometry node) as the
# shapefile geometry for writer `w`.
def set_area_geometry ( w , src ) : assert "areaSource" in src . tag geometry_node = src . nodes [ get_taglist ( src ) . index ( "areaGeometry" ) ] area_attrs = parse_area_geometry ( geometry_node ) w . poly ( parts = [ area_attrs [ "polygon" ] . tolist ( ) ] )
Set area polygon as shapefile geometry
87
8
232,250
# Write the point source location (lon, lat from its pointGeometry node)
# as the shapefile geometry for writer `w`.
def set_point_geometry ( w , src ) : assert "pointSource" in src . tag geometry_node = src . nodes [ get_taglist ( src ) . index ( "pointGeometry" ) ] point_attrs = parse_point_geometry ( geometry_node ) w . point ( point_attrs [ "point" ] [ 0 ] , point_attrs [ "point" ] [ 1 ] )
Set point location as shapefile geometry .
93
8
232,251
# Write the simple fault trace (from its simpleFaultGeometry node) as a
# polyline shapefile geometry for writer `w`.
def set_simple_fault_geometry ( w , src ) : assert "simpleFaultSource" in src . tag geometry_node = src . nodes [ get_taglist ( src ) . index ( "simpleFaultGeometry" ) ] fault_attrs = parse_simple_fault_geometry ( geometry_node ) w . line ( parts = [ fault_attrs [ "trace" ] . tolist ( ) ] )
Set simple fault trace coordinates as shapefile geometry .
96
10
232,252
# Build a 3D polygon shapefile geometry from a simple fault source's
# simpleFaultGeometry node, delegating to build_polygon_from_fault_attrs.
def set_simple_fault_geometry_3D ( w , src ) : assert "simpleFaultSource" in src . tag geometry_node = src . nodes [ get_taglist ( src ) . index ( "simpleFaultGeometry" ) ] fault_attrs = parse_simple_fault_geometry ( geometry_node ) build_polygon_from_fault_attrs ( w , fault_attrs )
Builds a 3D polygon from a node instance
96
11
232,253
# Scan all sources once and record which typologies / geometries / MFD
# kinds appear, plus the maximum sizes of nodal-plane, hypo-depth, plane
# and rate lists, so that the shapefile schema contains only the fields
# the model actually needs.
# NOTE(review): in the MFD branch only truncGutenbergRichterMFD and
# incrementalMFD are inspected; arbitraryMFD/YoungsCoppersmithMFD fall
# through to the final `else: pass` - confirm this is intended.
def appraise_source_model ( self ) : for src in self . sources : # source params src_taglist = get_taglist ( src ) if "areaSource" in src . tag : self . has_area_source = True npd_node = src . nodes [ src_taglist . index ( "nodalPlaneDist" ) ] npd_size = len ( npd_node ) hdd_node = src . nodes [ src_taglist . index ( "hypoDepthDist" ) ] hdd_size = len ( hdd_node ) self . num_np = ( npd_size if npd_size > self . num_np else self . num_np ) self . num_hd = ( hdd_size if hdd_size > self . num_hd else self . num_hd ) elif "pointSource" in src . tag : self . has_point_source = True npd_node = src . nodes [ src_taglist . index ( "nodalPlaneDist" ) ] npd_size = len ( npd_node ) hdd_node = src . nodes [ src_taglist . index ( "hypoDepthDist" ) ] hdd_size = len ( hdd_node ) self . num_np = ( npd_size if npd_size > self . num_np else self . num_np ) self . num_hd = ( hdd_size if hdd_size > self . num_hd else self . num_hd ) elif "simpleFaultSource" in src . tag : self . has_simple_fault_geometry = True elif "complexFaultSource" in src . tag : self . has_complex_fault_geometry = True elif "characteristicFaultSource" in src . tag : # Get the surface node surface_node = src . nodes [ src_taglist . index ( "surface" ) ] p_size = 0 for surface in surface_node . nodes : if "simpleFaultGeometry" in surface . tag : self . has_simple_fault_geometry = True elif "complexFaultGeometry" in surface . tag : self . has_complex_fault_geometry = True elif "planarSurface" in surface . tag : self . has_planar_geometry = True p_size += 1 self . num_p = p_size if p_size > self . num_p else self . num_p else : pass # MFD params if "truncGutenbergRichterMFD" in src_taglist : self . has_mfd_gr = True elif "incrementalMFD" in src_taglist : self . has_mfd_incremental = True # Get rate size mfd_node = src . nodes [ src_taglist . index ( "incrementalMFD" ) ] r_size = len ( mfd_node . nodes [ 0 ] . text ) self . num_r = r_size if r_size > self . num_r else self . num_r else : pass
Identify parameters defined in NRML source model file so that shapefile contains only source model specific fields .
681
21
232,254
# Export `source_model` to a NRML file at `destination` (overwriting any
# existing file), grouping sources by tectonic region into sourceGroup
# nodes.  When `name` is given it overrides the model name.
def write ( self , destination , source_model , name = None ) : if os . path . exists ( destination ) : os . remove ( destination ) self . destination = destination if name : source_model . name = name output_source_model = Node ( "sourceModel" , { "name" : name } ) dic = groupby ( source_model . sources , operator . itemgetter ( 'tectonicRegion' ) ) for i , ( trt , srcs ) in enumerate ( dic . items ( ) , 1 ) : output_source_model . append ( Node ( 'sourceGroup' , { 'tectonicRegion' : trt , 'name' : 'group %d' % i } , nodes = srcs ) ) print ( "Exporting Source Model to %s" % self . destination ) with open ( self . destination , "wb" ) as f : nrml . write ( [ output_source_model ] , f , "%s" )
Exports to NRML
213
5
232,255
# Trim the module-level parameter lists down to what `src_mod` actually
# needs (as recorded by appraise_source_model).
# NOTE(review): this mutates module-level globals (STRIKE_PARAMS etc.)
# in place, so it can only safely run once per process.
def filter_params ( self , src_mod ) : # point and area related params STRIKE_PARAMS [ src_mod . num_np : ] = [ ] DIP_PARAMS [ src_mod . num_np : ] = [ ] RAKE_PARAMS [ src_mod . num_np : ] = [ ] NPW_PARAMS [ src_mod . num_np : ] = [ ] HDEPTH_PARAMS [ src_mod . num_hd : ] = [ ] HDW_PARAMS [ src_mod . num_hd : ] = [ ] # planar rupture related params PLANES_STRIKES_PARAM [ src_mod . num_p : ] = [ ] PLANES_DIPS_PARAM [ src_mod . num_p : ] = [ ] # rate params RATE_PARAMS [ src_mod . num_r : ] = [ ] if src_mod . has_simple_fault_geometry is False : GEOMETRY_PARAMS . remove ( ( 'dip' , 'dip' , 'f' ) ) if ( src_mod . has_simple_fault_geometry is False and src_mod . has_complex_fault_geometry is False and src_mod . has_planar_geometry is False ) : BASE_PARAMS . remove ( ( 'rake' , 'rake' , 'f' ) ) if ( src_mod . has_simple_fault_geometry is False and src_mod . has_complex_fault_geometry is False and src_mod . has_area_source is False and src_mod . has_point_source is False ) : GEOMETRY_PARAMS [ : ] = [ ] if src_mod . has_mfd_incremental is False : MFD_PARAMS . remove ( ( 'binWidth' , 'bin_width' , 'f' ) )
Remove params unneeded by source_model
424
9
232,256
# Serialize a node to an XML byte string via StreamingXMLWriter; mainly
# useful for testing.
def tostring ( node , indent = 4 , nsmap = None ) : out = io . BytesIO ( ) writer = StreamingXMLWriter ( out , indent , nsmap = nsmap ) writer . serialize ( node ) return out . getvalue ( )
Convert a node into an XML string by using the StreamingXMLWriter . This is useful for testing purposes .
59
23
232,257
# Thin wrapper around ElementTree.parse using the SourceLineParser
# (which tracks line numbers); `remove_comments` is accepted but unused here.
def parse ( source , remove_comments = True , * * kw ) : return ElementTree . parse ( source , SourceLineParser ( ) , * * kw )
Thin wrapper around ElementTree . parse
36
8
232,258
# Thin wrapper around ElementTree.iterparse using the SourceLineParser;
# `remove_comments` is accepted but unused here.
def iterparse ( source , events = ( 'end' , ) , remove_comments = True , * * kw ) : return ElementTree . iterparse ( source , events , SourceLineParser ( ) , * * kw )
Thin wrapper around ElementTree . iterparse
49
9
232,259
def _displayattrs ( attrib , expandattrs ) : if not attrib : return '' if expandattrs : alist = [ '%s=%r' % item for item in sorted ( attrib . items ( ) ) ] else : alist = list ( attrib ) return '{%s}' % ', ' . join ( alist )
Helper function to display the attributes of a Node object in lexicographic order .
78
16
232,260
# Recursively write a one-line-per-node text rendering of `node` to
# `output`: stripped tag + attribute summary + (optionally) the repr of
# the node text, indenting two extra spaces per nesting level.
def _display ( node , indent , expandattrs , expandvals , output ) : attrs = _displayattrs ( node . attrib , expandattrs ) if node . text is None or not expandvals : val = '' elif isinstance ( node . text , str ) : val = ' %s' % repr ( node . text . strip ( ) ) else : val = ' %s' % repr ( node . text ) # node.text can be a tuple output . write ( encode ( indent + striptag ( node . tag ) + attrs + val + '\n' ) ) for sub_node in node : _display ( sub_node , indent + '  ' , expandattrs , expandvals , output )
Core function to display a Node object
157
7
232,261
# Convert the node into a literal 4-tuple (tag, attrib, text, subnodes),
# recursing into subnodes via the module-level to_literal function.
def to_literal ( self ) : if not self . nodes : return ( self . tag , self . attrib , self . text , [ ] ) else : return ( self . tag , self . attrib , self . text , list ( map ( to_literal , self . nodes ) ) )
Convert the node into a literal Python object
65
9
232,262
# Pretty-print the node's literal (tag, attrib, text, subnodes) form via
# the pprint module.
def pprint ( self , stream = None , indent = 1 , width = 80 , depth = None ) : pp . pprint ( to_literal ( self ) , stream , indent , width , depth )
Pretty print the underlying literal Python object
44
7
232,263
# Lazily yield Node objects for the XML elements of `fname` that satisfy
# `filter_elem` (a function element -> bool), clearing each element after
# use to save memory.  Any exception is re-raised with the file name
# appended to its message.
def read_nodes ( fname , filter_elem , nodefactory = Node , remove_comments = True ) : try : for _ , el in iterparse ( fname , remove_comments = remove_comments ) : if filter_elem ( el ) : yield node_from_elem ( el , nodefactory ) el . clear ( ) # save memory except Exception : etype , exc , tb = sys . exc_info ( ) msg = str ( exc ) if not str ( fname ) in msg : msg = '%s in %s' % ( msg , fname ) raise_ ( etype , msg , tb )
Convert an XML file into a lazy iterator over Node objects satisfying the given specification i . e . a function element - > boolean .
141
28
232,264
# Parse an .xml file and convert its root element into a Node object.
def node_from_xml ( xmlfile , nodefactory = Node ) : root = parse ( xmlfile ) . getroot ( ) return node_from_elem ( root , nodefactory )
Convert a . xml file into a Node object .
47
11
232,265
# Convert a .ini file (path or file object) into a Node tree: one root
# node named `root_name` with one child per section carrying the section
# options as attributes.
def node_from_ini ( ini_file , nodefactory = Node , root_name = 'ini' ) : fileobj = open ( ini_file ) if isinstance ( ini_file , str ) else ini_file cfp = configparser . RawConfigParser ( ) cfp . read_file ( fileobj ) root = nodefactory ( root_name ) sections = cfp . sections ( ) for section in sections : params = dict ( cfp . items ( section ) ) root . append ( Node ( section , params ) ) return root
Convert a . ini file into a Node object .
122
12
232,266
def node_to_ini(node, output=sys.stdout):
    """
    Serialize a two-level Node tree into .ini format on *output*.

    Each subnode becomes a ``[section]`` (its tag) followed by the
    subnode's attributes as sorted ``name=value`` lines; the stream is
    flushed at the end.
    """
    write = output.write
    for section in node:
        write(u'\n[%s]\n' % section.tag)
        for key, val in sorted(section.attrib.items()):
            write(u'%s=%s\n' % (key, val))
    output.flush()
Convert a Node object with the right structure into a . ini file .
88
16
232,267
# Make a deep copy of the node (tag, copied attrib dict, text, and
# recursively copied subnodes), optionally rebuilding with `nodefactory`.
def node_copy ( node , nodefactory = Node ) : return nodefactory ( node . tag , node . attrib . copy ( ) , node . text , [ node_copy ( n , nodefactory ) for n in node ] )
Make a deep copy of the node
53
7
232,268
# Context manager (generator) that yields `node` and, on error, re-raises
# with the node tag, its line number (if any) and `fname` added to the
# message.  NOTE(review): presumably decorated with @contextmanager at the
# definition site - confirm in the full file.
def context ( fname , node ) : try : yield node except Exception : etype , exc , tb = sys . exc_info ( ) msg = 'node %s: %s, line %s of %s' % ( striptag ( node . tag ) , exc , getattr ( node , 'lineno' , '?' ) , fname ) raise_ ( etype , msg , tb )
Context manager managing exceptions and adding line number of the current node and name of the current file to the error message .
89
23
232,269
def shorten(self, tag):
    """
    Return the short form of a fully qualified ``{uri}name`` tag, mapping
    the namespace URI through ``self.nsmap`` (missing URIs map to '').
    Tags without a leading '{' are returned unchanged.
    """
    if not tag.startswith('{'):
        return tag
    uri, localname = tag.rsplit('}')
    return self.nsmap.get(uri[1:], '') + localname
Get the short representation of a fully qualified tag
57
9
232,270
def _write ( self , text ) : spaces = ' ' * ( self . indent * self . indentlevel ) t = spaces + text . strip ( ) + '\n' if hasattr ( t , 'encode' ) : t = t . encode ( self . encoding , 'xmlcharrefreplace' ) self . stream . write ( t )
Write text by respecting the current indentlevel
75
8
232,271
# Open an XML tag on the output stream, writing sorted attributes (values
# quoted via quoteattr after scientificformat) and increasing the
# indentation level for the tag body.
def start_tag ( self , name , attrs = None ) : if not attrs : self . _write ( '<%s>' % name ) else : self . _write ( '<' + name ) for ( name , value ) in sorted ( attrs . items ( ) ) : self . _write ( ' %s=%s' % ( name , quoteattr ( scientificformat ( value ) ) ) ) self . _write ( '>' ) self . indentlevel += 1
Open an XML tag
106
4
232,272
# Yield the direct subnodes whose stripped tag equals `name`.
def getnodes ( self , name ) : for node in self . nodes : if striptag ( node . tag ) == name : yield node
Return the direct subnodes with name name
31
9
232,273
def append(self, node):
    """
    Append *node* as a new subnode.

    :raises TypeError: if *node* is not an instance of the same Node
        class as *self*
    """
    if isinstance(node, self.__class__):
        self.nodes.append(node)
    else:
        raise TypeError('Expected Node instance, got %r' % node)
Append a new subnode
47
6
232,274
# Feed a byte string to the underlying expat parser inside the error
# context, and return the parsed root.  Callers may pass isfinal=False
# to parse large input in chunks and finish with an empty final chunk.
def parse_bytes ( self , bytestr , isfinal = True ) : with self . _context ( ) : self . filename = None self . p . Parse ( bytestr , isfinal ) return self . _root
Parse a byte string . If the string is very large split it in chunks and parse each chunk with isfinal = False then parse an empty chunk with isfinal = True .
49
38
232,275
# Parse a file object (anything with .read) or a filename with the
# underlying expat parser, recording the name for error messages, and
# return the parsed root.
def parse_file ( self , file_or_fname ) : with self . _context ( ) : if hasattr ( file_or_fname , 'read' ) : self . filename = getattr ( file_or_fname , 'name' , file_or_fname . __class__ . __name__ ) self . p . ParseFile ( file_or_fname ) else : self . filename = file_or_fname with open ( file_or_fname , 'rb' ) as f : self . p . ParseFile ( f ) return self . _root
Parse a file or a filename
131
7
232,276
def _get_magnitudes_from_spacing ( self , magnitudes , delta_m ) : min_mag = np . min ( magnitudes ) max_mag = np . max ( magnitudes ) if ( max_mag - min_mag ) < delta_m : raise ValueError ( 'Bin width greater than magnitude range!' ) mag_bins = np . arange ( np . floor ( min_mag ) , np . ceil ( max_mag ) , delta_m ) # Check to see if there are magnitudes in lower and upper bins is_mag = np . logical_and ( mag_bins - max_mag < delta_m , min_mag - mag_bins < delta_m ) mag_bins = mag_bins [ is_mag ] return mag_bins
If a single magnitude spacing is input then create the bins
176
11
232,277
def _merge_data ( dat1 , dat2 ) : cnt = 0 for key in dat1 : flg1 = len ( dat1 [ key ] ) > 0 flg2 = len ( dat2 [ key ] ) > 0 if flg1 != flg2 : cnt += 1 if cnt : raise Warning ( 'Cannot merge catalogues with different' + ' attributes' ) return None else : for key in dat1 : if isinstance ( dat1 [ key ] , np . ndarray ) : dat1 [ key ] = np . concatenate ( ( dat1 [ key ] , dat2 [ key ] ) , axis = 0 ) elif isinstance ( dat1 [ key ] , list ) : dat1 [ key ] += dat2 [ key ] else : raise ValueError ( 'Unknown type' ) return dat1
Merge two data dictionaries containing catalogue data
182
9
232,278
def _get_row_str ( self , i ) : row_data = [ "{:s}" . format ( self . data [ 'eventID' ] [ i ] ) , "{:g}" . format ( self . data [ 'year' ] [ i ] ) , "{:g}" . format ( self . data [ 'month' ] [ i ] ) , "{:g}" . format ( self . data [ 'day' ] [ i ] ) , "{:g}" . format ( self . data [ 'hour' ] [ i ] ) , "{:g}" . format ( self . data [ 'minute' ] [ i ] ) , "{:.1f}" . format ( self . data [ 'second' ] [ i ] ) , "{:.3f}" . format ( self . data [ 'longitude' ] [ i ] ) , "{:.3f}" . format ( self . data [ 'latitude' ] [ i ] ) , "{:.1f}" . format ( self . data [ 'depth' ] [ i ] ) , "{:.1f}" . format ( self . data [ 'magnitude' ] [ i ] ) ] return " " . join ( row_data )
Returns a string representation of the key information in a row
263
11
232,279
def load_to_array(self, keys):
    """
    Return the catalogue data for *keys* as a 2-D numpy array with one
    row per event and one column per key.  Works only for data that is
    convertible to float.
    """
    nrows = len(self.data[keys[0]])
    out = np.empty((nrows, len(keys)))
    for col, key in enumerate(keys):
        column = self.data[key]
        for row in range(nrows):
            out[row, col] = column[row]
    return out
This loads the data contained in the catalogue into a numpy array . The method works only for float data
96
21
232,280
# Load catalogue columns from a 2-D array: one column per key, casting
# INT_ATTRIBUTE_LIST keys to int, warning (by print) about unrecognised
# keys, and refreshing the end year afterwards.  Raises ValueError when
# the key list does not match the array width.
def load_from_array ( self , keys , data_array ) : if len ( keys ) != np . shape ( data_array ) [ 1 ] : raise ValueError ( 'Key list does not match shape of array!' ) for i , key in enumerate ( keys ) : if key in self . INT_ATTRIBUTE_LIST : self . data [ key ] = data_array [ : , i ] . astype ( int ) else : self . data [ key ] = data_array [ : , i ] if key not in self . TOTAL_ATTRIBUTE_LIST : print ( 'Key %s not a recognised catalogue attribute' % key ) self . update_end_year ( )
This loads the data contained in an array into the catalogue object
151
12
232,281
def catalogue_mt_filter(self, mt_table, flag=None):
    """
    Filter the catalogue using a magnitude-time table (two columns:
    completeness year, magnitude).  Events earlier than a row's year and
    smaller than its magnitude are flagged invalid and purged.

    :param mt_table: iterable of (year, magnitude) pairs
    :param flag: optional boolean validity vector; when None all events
        start out valid
    """
    if flag is None:
        # No flag defined, therefore all events are initially valid
        flag = np.ones(self.get_number_events(), dtype=bool)
    for comp_val in mt_table:
        id0 = np.logical_and(
            self.data['year'].astype(float) < comp_val[0],
            self.data['magnitude'] < comp_val[1])
        # removed stray debug `print(id0)` left in the original
        flag[id0] = False
    if not np.all(flag):
        self.purge_catalogue(flag)
Filter the catalogue using a magnitude - time table . The table has two columns and n - rows .
143
20
232,282
def get_bounding_box(self):
    """
    Return the catalogue bounding box as the tuple
    (min_lon, max_lon, min_lat, max_lat).
    """
    lons = self.data["longitude"]
    lats = self.data["latitude"]
    return (np.min(lons), np.max(lons), np.min(lats), np.max(lats))
Returns the bounding box of the catalogue
73
8
232,283
# Return the catalogue event times as decimal years, delegating to the
# decimal_time helper on the year/month/day/hour/minute/second columns.
def get_decimal_time ( self ) : return decimal_time ( self . data [ 'year' ] , self . data [ 'month' ] , self . data [ 'day' ] , self . data [ 'hour' ] , self . data [ 'minute' ] , self . data [ 'second' ] )
Returns the time of the catalogue as a decimal
70
9
232,284
# Sort the catalogue into chronological order by decimal time; a no-op
# when the events are already strictly increasing in time.
def sort_catalogue_chronologically ( self ) : dec_time = self . get_decimal_time ( ) idx = np . argsort ( dec_time ) if np . all ( ( idx [ 1 : ] - idx [ : - 1 ] ) > 0. ) : # Catalogue was already in chronological order return self . select_catalogue_events ( idx )
Sorts the catalogue into chronological order
85
7
232,285
# Keep only the events whose entry in the boolean `flag_vector` is True,
# then refresh the event count.
def purge_catalogue ( self , flag_vector ) : id0 = np . where ( flag_vector ) [ 0 ] self . select_catalogue_events ( id0 ) self . get_number_events ( )
Purges present catalogue with invalid events defined by flag_vector
48
12
232,286
def select_catalogue_events(self, id0):
    """
    Keep only the events selected by index vector *id0*, applied to
    every non-empty numpy array (fancy indexing) or list (element
    selection) in the data dictionary; empty entries are left alone.
    """
    for key in self.data:
        values = self.data[key]
        if isinstance(values, np.ndarray) and len(values) > 0:
            self.data[key] = values[id0]
        elif isinstance(values, list) and len(values) > 0:
            self.data[key] = [values[pos] for pos in id0]
Orders the events in the catalogue according to an indexing vector .
140
14
232,287
# Return a 1-D (optionally normalised and bootstrap-sampled) histogram of
# event depths over `depth_bins`; missing depthError defaults to zeros.
# Raises ValueError when the catalogue has no depth data.
def get_depth_distribution ( self , depth_bins , normalisation = False , bootstrap = None ) : if len ( self . data [ 'depth' ] ) == 0 : # If depth information is missing raise ValueError ( 'Depths missing in catalogue' ) if len ( self . data [ 'depthError' ] ) == 0 : self . data [ 'depthError' ] = np . zeros ( self . get_number_events ( ) , dtype = float ) return bootstrap_histogram_1D ( self . data [ 'depth' ] , depth_bins , self . data [ 'depthError' ] , normalisation = normalisation , number_bootstraps = bootstrap , boundaries = ( 0. , None ) )
Gets the depth distribution of the earthquake catalogue to return a single histogram . Depths may be normalised . If uncertainties are found in the catalogue the distribution may be bootstrap sampled
162
38
232,288
# Return the depth distribution as a PMF over bin midpoints; without
# depth data a single-spike PMF at `default_depth` is returned.  The
# rounded histogram is iteratively adjusted in its last bin until it
# sums exactly to 1.0.
# NOTE(review): the while loop relies on the rounded sum reaching exactly
# 1.0 - confirm it terminates for all inputs.
def get_depth_pmf ( self , depth_bins , default_depth = 5.0 , bootstrap = None ) : if len ( self . data [ 'depth' ] ) == 0 : # If depth information is missing return PMF ( [ ( 1.0 , default_depth ) ] ) # Get the depth distribution depth_hist = self . get_depth_distribution ( depth_bins , normalisation = True , bootstrap = bootstrap ) # If the histogram does not sum to 1.0 then remove the difference # from the lowest bin depth_hist = np . around ( depth_hist , 3 ) while depth_hist . sum ( ) - 1.0 : depth_hist [ - 1 ] -= depth_hist . sum ( ) - 1.0 depth_hist = np . around ( depth_hist , 3 ) pmf_list = [ ] for iloc , prob in enumerate ( depth_hist ) : pmf_list . append ( ( prob , ( depth_bins [ iloc ] + depth_bins [ iloc + 1 ] ) / 2.0 ) ) return PMF ( pmf_list )
Returns the depth distribution of the catalogue as a probability mass function
248
12
232,289
# Return a 2-D magnitude-depth histogram (optionally normalised and
# bootstrap-sampled); missing depthError / sigmaMagnitude columns are
# filled with zeros.  Raises ValueError when depths are missing.
def get_magnitude_depth_distribution ( self , magnitude_bins , depth_bins , normalisation = False , bootstrap = None ) : if len ( self . data [ 'depth' ] ) == 0 : # If depth information is missing raise ValueError ( 'Depths missing in catalogue' ) if len ( self . data [ 'depthError' ] ) == 0 : self . data [ 'depthError' ] = np . zeros ( self . get_number_events ( ) , dtype = float ) if len ( self . data [ 'sigmaMagnitude' ] ) == 0 : self . data [ 'sigmaMagnitude' ] = np . zeros ( self . get_number_events ( ) , dtype = float ) return bootstrap_histogram_2D ( self . data [ 'magnitude' ] , self . data [ 'depth' ] , magnitude_bins , depth_bins , boundaries = [ ( 0. , None ) , ( None , None ) ] , xsigma = self . data [ 'sigmaMagnitude' ] , ysigma = self . data [ 'depthError' ] , normalisation = normalisation , number_bootstraps = bootstrap )
Returns a 2 - D magnitude - depth histogram for the catalogue
265
13
232,290
# Return a 2-D histogram of event counts over (decimal-year time,
# magnitude) bins; time uncertainties are zero, magnitude uncertainties
# come from the sigmaMagnitude column.
def get_magnitude_time_distribution ( self , magnitude_bins , time_bins , normalisation = False , bootstrap = None ) : return bootstrap_histogram_2D ( self . get_decimal_time ( ) , self . data [ 'magnitude' ] , time_bins , magnitude_bins , xsigma = np . zeros ( self . get_number_events ( ) ) , ysigma = self . data [ 'sigmaMagnitude' ] , normalisation = normalisation , number_bootstraps = bootstrap )
Returns a 2 - D histogram indicating the number of earthquakes in a set of time - magnitude bins . Time is in decimal years!
128
27
232,291
def concatenate(self, catalogue):
    """
    Attach another catalogue to this one: data columns are merged via
    _merge_data, end_year/start_year become the max/min of the two,
    event counts are summed, and processing histories must match.
    The result is re-sorted chronologically.

    :param catalogue: catalogue object with the same attributes as self
    :raises ValueError: on mismatched processing history or an unknown
        attribute
    """
    atts = getattr(self, 'data')
    attn = getattr(catalogue, 'data')
    data = _merge_data(atts, attn)
    if data is not None:
        setattr(self, 'data', data)
    for attrib in vars(self):
        atts = getattr(self, attrib)
        attn = getattr(catalogue, attrib)
        # BUG FIX: the original compared strings with `is`, which relies
        # on CPython string interning and raises a SyntaxWarning on
        # modern interpreters; use value equality instead.
        if attrib == 'end_year':
            setattr(self, attrib, max(atts, attn))
        elif attrib == 'start_year':
            setattr(self, attrib, min(atts, attn))
        elif attrib == 'data':
            pass
        elif attrib == 'number_earthquakes':
            setattr(self, attrib, atts + attn)
        elif attrib == 'processes':
            if atts != attn:
                raise ValueError('The catalogues cannot be merged' +
                                 ' since the they have' +
                                 ' a different processing history')
        else:
            raise ValueError('unknown attribute: %s' % attrib)
    self.sort_catalogue_chronologically()
This method attaches one catalogue to the current one
261
9
232,292
# Build a correspondence between exportable datastore keys and database
# outputs: collect the datastore keys that the export machinery knows
# about, add/remove keys according to the calculation parameters, import
# the job into the db if needed, and record each output's size in MB.
def expose_outputs ( dstore , owner = getpass . getuser ( ) , status = 'complete' ) : oq = dstore [ 'oqparam' ] exportable = set ( ekey [ 0 ] for ekey in export . export ) calcmode = oq . calculation_mode dskeys = set ( dstore ) & exportable # exportable datastore keys dskeys . add ( 'fullreport' ) rlzs = dstore [ 'csm_info' ] . rlzs if len ( rlzs ) > 1 : dskeys . add ( 'realizations' ) if len ( dstore [ 'csm_info/sg_data' ] ) > 1 : # export sourcegroups.csv dskeys . add ( 'sourcegroups' ) hdf5 = dstore . hdf5 if 'hcurves-stats' in hdf5 or 'hcurves-rlzs' in hdf5 : if oq . hazard_stats ( ) or oq . individual_curves or len ( rlzs ) == 1 : dskeys . add ( 'hcurves' ) if oq . uniform_hazard_spectra : dskeys . add ( 'uhs' ) # export them if oq . hazard_maps : dskeys . add ( 'hmaps' ) # export them if 'avg_losses-stats' in dstore or ( 'avg_losses-rlzs' in dstore and len ( rlzs ) ) : dskeys . add ( 'avg_losses-stats' ) if 'curves-rlzs' in dstore and len ( rlzs ) == 1 : dskeys . add ( 'loss_curves-rlzs' ) if 'curves-stats' in dstore and len ( rlzs ) > 1 : dskeys . add ( 'loss_curves-stats' ) if oq . conditional_loss_poes : # expose loss_maps outputs if 'loss_curves-stats' in dstore : dskeys . add ( 'loss_maps-stats' ) if 'all_loss_ratios' in dskeys : dskeys . remove ( 'all_loss_ratios' ) # export only specific IDs if 'ruptures' in dskeys and 'scenario' in calcmode : exportable . remove ( 'ruptures' ) # do not export, as requested by Vitor if 'rup_loss_table' in dskeys : # keep it hidden for the moment dskeys . remove ( 'rup_loss_table' ) if 'hmaps' in dskeys and not oq . hazard_maps : dskeys . remove ( 'hmaps' ) # do not export the hazard maps if logs . dbcmd ( 'get_job' , dstore . calc_id ) is None : # the calculation has not been imported in the db yet logs . dbcmd ( 'import_job' , dstore . calc_id , oq . calculation_mode , oq . description + ' [parent]' , owner , status , oq . hazard_calculation_id , dstore . 
datadir ) keysize = [ ] for key in sorted ( dskeys & exportable ) : try : size_mb = dstore . get_attr ( key , 'nbytes' ) / MB except ( KeyError , AttributeError ) : size_mb = None keysize . append ( ( key , size_mb ) ) ds_size = os . path . getsize ( dstore . filename ) / MB logs . dbcmd ( 'create_outputs' , dstore . calc_id , keysize , ds_size )
Build a correspondence between the outputs in the datastore and the ones in the database .
824
18
232,293
# Signal handler: raise MasterKilled with a message describing the signal.
# Under celery further Ctrl-C presses are inhibited to allow task
# revocation; SIGHUP only kills the master when the controlling terminal
# died (os.getppid() != _PPID), otherwise the handler returns silently.
def raiseMasterKilled ( signum , _stack ) : # Disable further CTRL-C to allow tasks revocation when Celery is used if OQ_DISTRIBUTE . startswith ( 'celery' ) : signal . signal ( signal . SIGINT , inhibitSigInt ) msg = 'Received a signal %d' % signum if signum in ( signal . SIGTERM , signal . SIGINT ) : msg = 'The openquake master process was killed manually' # kill the calculation only if os.getppid() != _PPID, i.e. the controlling # terminal died; in the workers, do nothing # NB: there is no SIGHUP on Windows if hasattr ( signal , 'SIGHUP' ) : if signum == signal . SIGHUP : if os . getppid ( ) == _PPID : return else : msg = 'The openquake master lost its controlling terminal' raise MasterKilled ( msg )
When a SIGTERM is received raise the MasterKilled exception with an appropriate error message .
207
19
232,294
def job_from_file(job_ini, job_id, username, **kw):
    """
    Create a full job profile from a job config file.

    :param job_ini: path to a job.ini file
    :param job_id: ID of the job in the database
    :param username: name of the user running the job
    :param kw: optional overrides (hazard_calculation_id,
        calculation_mode, description, exposure_file)
    :returns: the oqparam instance built from the config file
    """
    hc_id = kw.get('hazard_calculation_id')
    try:
        oq = readinput.get_oqparam(job_ini, hc_id=hc_id)
    except Exception:
        # mark the job as failed in the db before propagating the error
        logs.dbcmd('finish', job_id, 'failed')
        raise
    # apply simple attribute overrides, if given
    for attr in ('calculation_mode', 'description'):
        if attr in kw:
            setattr(oq, attr, kw.pop(attr))
    if 'exposure_file' in kw:  # hack used in commands.engine
        names = kw.pop('exposure_file').split()
        if names:
            oq.inputs['exposure'] = names
        elif 'exposure' in oq.inputs:
            del oq.inputs['exposure']
    logs.dbcmd('update_job', job_id,
               dict(calculation_mode=oq.calculation_mode,
                    description=oq.description,
                    user_name=username,
                    hazard_calculation_id=hc_id))
    return oq
Create a full job profile from a job config file .
280
11
232,295
def check_obsolete_version(calculation_mode='WebUI'):
    """
    Check if there is a newer version of the engine.

    :param calculation_mode: kind of caller, put in the User-Agent header
    :returns: a warning message if the running version is obsolete,
        an empty string if it is up to date, None if the check was
        skipped (CI environment) or failed (no network, bad tag)
    """
    if any(os.environ.get(var) for var in ('JENKINS_URL', 'TRAVIS')):
        # avoid flooding our API server with requests from CI systems
        return
    user_agent = 'OpenQuake Engine %s;%s;%s;%s' % (
        __version__, calculation_mode, platform.platform(),
        config.distribution.oq_distribute)
    try:
        request = Request(OQ_API + '/engine/latest',
                          headers={'User-Agent': user_agent})
        # NB: a timeout < 1 does not work
        raw = urlopen(request, timeout=1).read()  # bytes
        tag_name = json.loads(decode(raw))['tag_name']
        obsolete = version_triple(__version__) < version_triple(tag_name)
    except Exception:  # page not available or wrong version tag
        return
    if not obsolete:
        return ''
    return ('Version %s of the engine is available, but you are '
            'still using version %s' % (tag_name, __version__))
Check if there is a newer version of the engine .
260
11
232,296
def encode(val):
    """
    Encode a string assuming the encoding is UTF-8.

    :param val: a string, an already-encoded object, or a list/tuple
        of such values
    :returns: the UTF-8 bytes for a string, a list of encoded values
        for a list/tuple, or the value itself otherwise
    """
    if isinstance(val, str):
        return val.encode('utf-8')
    if isinstance(val, (list, tuple)):
        # encode a list or tuple of strings, recursively
        return [encode(item) for item in val]
    # assume it was an already encoded object
    return val
Encode a string assuming the encoding is UTF-8.
71
12
232,297
def raise_(tp, value=None, tb=None):
    """
    A function that matches the Python 2.x ``raise`` statement. This
    allows re-raising exceptions with the class, value and traceback
    on Python 2 and 3.

    :param tp: an exception class or an exception instance
    :param value: the argument to build the exception with (only valid
        when `tp` is a class)
    :param tb: a traceback object to attach to the raised exception
    :raises TypeError: if an exception instance is passed together
        with a separate value
    """
    if value is not None and isinstance(tp, Exception):
        raise TypeError("instance exception may not have a separate value")
    if value is not None:
        exc = tp(value)
    elif isinstance(tp, BaseException):
        exc = tp
    else:
        # `tp` is an exception class and no value was given: instantiate
        # it; otherwise the `.__traceback__` / `.with_traceback` accesses
        # below would operate on the class object and fail with a
        # descriptor TypeError instead of raising the intended exception
        exc = tp()
    if exc.__traceback__ is not tb:
        raise exc.with_traceback(tb)
    raise exc
A function that matches the Python 2.x raise statement. This allows re-raising exceptions with the class, value and traceback on Python 2 and 3.
91
32
232,298
def plot_pyro(calc_id=-1):
    """
    Plot the pyroclastic cloud and the assets.

    :param calc_id: calculation ID (default -1, i.e. the last one)
    """
    # NB: matplotlib is imported inside since it is a costly import
    import matplotlib.pyplot as p
    dstore = util.read(calc_id)
    sitecol = dstore['sitecol']
    asset_risk = dstore['asset_risk'].value

    def scatter(flags, marker, color):
        # plot the sites where the given flag array is 1
        idxs, = numpy.where(flags == 1)
        p.scatter(sitecol.lons[idxs], sitecol.lats[idxs],
                  marker=marker, color=color)

    scatter(dstore['multi_peril']['PYRO'], 'o', 'red')  # the cloud
    scatter(asset_risk['building-PYRO'], '.', 'green')  # hit buildings
    p.show()
Plot the pyroclastic cloud and the assets
231
10
232,299
def get_resampled_coordinates(lons, lats):
    """
    Resample polygon line segments and return the coordinates of the new
    vertices. This limits distortions when projecting a polygon onto a
    spherical surface.

    :param lons: longitudes of the polygon vertices
    :param lats: latitudes of the polygon vertices
    :returns: a pair of numpy arrays with the resampled longitudes
        and latitudes
    """
    num_coords = len(lons)
    assert num_coords == len(lats)

    start_lons = numpy.array(lons)
    start_lats = numpy.array(lats)
    # arrays rotated by one position: pairing start and end arrays gives
    # every edge of the polygon, including the closing edge from the last
    # vertex back to the first one
    end_lons = numpy.concatenate((start_lons[1:], start_lons[:1]))
    end_lats = numpy.concatenate((start_lats[1:], start_lats[:1]))
    lengths = geodetic.geodetic_distance(
        start_lons, start_lats, end_lons, end_lats)

    out_lons = [lons[0]]
    out_lats = [lats[0]]
    for lon1, lat1, lon2, lat2, length in zip(
            start_lons, start_lats, end_lons, end_lats, lengths):
        num_points = int(length / UPSAMPLING_STEP_KM) + 1
        if num_points >= 2:
            # increase the resolution of this edge by adding new points
            new_lons, new_lats, _ = geodetic.npoints_between(
                lon1, lat1, 0, lon2, lat2, 0, num_points)
            out_lons.extend(new_lons[1:])
            out_lats.extend(new_lats[1:])
        else:
            out_lons.append(lon2)
            out_lats.append(lat2)
    # NB: we cut off the last point because it repeats the first one
    return numpy.array(out_lons[:-1]), numpy.array(out_lats[:-1])
Resample polygon line segments and return the coordinates of the new vertices . This limits distortions when projecting a polygon onto a spherical surface .
447
29