idx
int64
0
251k
question
stringlengths
53
3.53k
target
stringlengths
5
1.23k
len_question
int64
20
893
len_target
int64
3
238
232,000
def get_model(self, sm_id):
    """
    Extract a CompositeSourceModel instance containing the single
    model of index ``sm_id``.
    """
    model = self.source_models[sm_id]
    if self.source_model_lt.num_samples:
        # NOTE(review): this mutates the shared source model logic tree in
        # place - presumably intentional, confirm against callers
        self.source_model_lt.num_samples = model.samples
    sub = self.__class__(self.gsim_lt, self.source_model_lt, [model],
                         self.optimize_same_id)
    sub.sm_id = sm_id
    return sub
Extract a CompositeSourceModel instance containing the single model of index sm_id .
100
17
232,001
def new(self, sources_by_grp):
    """
    Generate a new CompositeSourceModel from the given dictionary.

    :param sources_by_grp: dictionary mapping a group id to a list of sources
    :returns: a new instance of self.__class__ with the sources replaced
    """
    source_models = []
    for sm in self.source_models:
        src_groups = []
        for src_group in sm.src_groups:
            # shallow copy of the group, then replace its sources with the
            # ones supplied for that group id, sorted by source id
            sg = copy.copy(src_group)
            sg.sources = sorted(sources_by_grp.get(sg.id, []),
                                key=operator.attrgetter('id'))
            src_groups.append(sg)
        newsm = logictree.LtSourceModel(
            sm.names, sm.weight, sm.path, src_groups,
            sm.num_gsim_paths, sm.ordinal, sm.samples)
        source_models.append(newsm)
    new = self.__class__(self.gsim_lt, self.source_model_lt, source_models,
                         self.optimize_same_id)
    # refresh the bookkeeping info on the new composite model
    new.info.update_eff_ruptures(new.get_num_ruptures())
    new.info.tot_weight = new.get_weight()
    return new
Generate a new CompositeSourceModel from the given dictionary .
233
12
232,002
def check_dupl_sources(self):  # used in print_csm_info
    """
    Extracts duplicated sources, i.e. sources with the same source_id in
    different source groups. Raise an exception if there are sources with
    the same ID which are not duplicated.
    """
    by_id = collections.defaultdict(list)
    for grp in self.src_groups:
        for src in grp:
            try:
                key = src.source_id
            except AttributeError:  # src is a Node object
                key = src['id']
            by_id[key].append(src)
    duplicated = []
    for key, same_id in sorted(by_id.items()):
        if len(same_id) > 1:
            _assert_equal_sources(same_id)
            duplicated.append(same_id)
    return duplicated
Extracts duplicated sources i . e . sources with the same source_id in different source groups . Raise an exception if there are sources with the same ID which are not duplicated .
137
39
232,003
def get_sources(self, kind='all'):
    """
    Extract the sources contained in the source models, optionally
    filtering them by group interdependency kind.
    """
    assert kind in ('all', 'indep', 'mutex'), kind
    out = []
    for sm in self.source_models:
        for grp in sm.src_groups:
            if kind not in ('all', grp.src_interdep):
                continue
            for src in grp:
                if sm.samples > 1:
                    # propagate the number of samples down to the source
                    src.samples = sm.samples
                out.append(src)
    return out
Extract the sources contained in the source models by optionally filtering and splitting them depending on the passed parameter .
106
21
232,004
def init_serials(self, ses_seed):
    """
    Assign to each source a serial number, starting from ses_seed and
    increasing by the number of ruptures of the source. This should be
    called only in event based calculators.
    """
    offset = ses_seed
    for src in self.get_sources():
        src.serial = offset
        offset += src.num_ruptures
Assign a unique serial number to each source, spaced by its number of ruptures, so that unique rupture seeds can be derived later. This should be called only in event based calculators
53
24
232,005
def get_maxweight(self, weight, concurrent_tasks, minweight=MINWEIGHT):
    """
    Return an appropriate maxweight for use in the block_splitter:
    the total weight divided by the number of tasks, but never below
    ``minweight``.
    """
    tasks = concurrent_tasks or 1
    per_task = math.ceil(self.get_weight(weight) / tasks)
    return max(per_task, minweight)
Return an appropriate maxweight for use in the block_splitter
66
13
232,006
def weight_list_to_tuple(data, attr_name):
    """
    Converts parallel 'Value'/'Weight' lists into a list of
    (value, weight) pairs, checking that the weights sum to 1.

    :raises ValueError: on mismatched lengths or weights not summing to 1
    """
    values, raw_weights = data['Value'], data['Weight']
    if len(values) != len(raw_weights):
        raise ValueError('Number of weights do not correspond to number of '
                         'attributes in %s' % attr_name)
    weight = np.array(raw_weights)
    if fabs(np.sum(weight) - 1.) > 1E-7:
        raise ValueError('Weights do not sum to 1.0 in %s' % attr_name)
    return [(value, wgt) for value, wgt in zip(values, weight)]
Converts a list of values and corresponding weights to a tuple of values
170
14
232,007
def parse_tect_region_dict_to_tuples(region_dict):
    """
    Parses the tectonic regionalisation dictionary attributes to
    (value, weight) tuples.
    """
    output_region_dict = []
    tuple_keys = ['Displacement_Length_Ratio', 'Shear_Modulus']
    for region in region_dict:
        # plain weighted attributes
        for key in tuple_keys:
            region[key] = weight_list_to_tuple(region[key], key)
        # the MSR 'Value' entries are scalerel objects, already converted
        # from string names upstream - only the tuple conversion is needed
        region['Magnitude_Scaling_Relation'] = weight_list_to_tuple(
            region['Magnitude_Scaling_Relation'], 'Magnitude Scaling Relation')
        output_region_dict.append(region)
    return output_region_dict
Parses the tectonic regionalisation dictionary attributes to tuples
193
14
232,008
def get_scaling_relation_tuple(msr_dict):
    """
    For a dictionary of scaling relation values, convert the string names
    into instances of the corresponding scaling-relation classes
    (in place), then return the (value, weight) tuple list.

    :param msr_dict: dict with 'Value' (names) and 'Weight' lists
    :raises ValueError: if a name is not a key of SCALE_REL_MAP
    """
    # Convert MSR string name to openquake.hazardlib.scalerel object
    for iloc, value in enumerate(msr_dict['Value']):
        # idiomatic membership test (was: `not value in ...keys()`)
        if value not in SCALE_REL_MAP:
            raise ValueError('Scaling relation %s not supported!' % value)
        msr_dict['Value'][iloc] = SCALE_REL_MAP[value]()
    return weight_list_to_tuple(msr_dict, 'Magnitude Scaling Relation')
For a dictionary of scaling relation values convert string list to object list and then to tuple
136
17
232,009
def read_file(self, mesh_spacing=1.0):
    """
    Reads the file and returns an instance of the FaultSource class.

    :param float mesh_spacing: fault mesh spacing (km) passed to the
        geometry builder
    :returns: (mtkActiveFaultModel, tectonic regionalisation or None)
    """
    # Process the tectonic regionalisation
    tectonic_reg = self.process_tectonic_regionalisation()
    model = mtkActiveFaultModel(self.data['Fault_Model_ID'],
                                self.data['Fault_Model_Name'])
    for fault in self.data['Fault_Model']:
        fault_geometry = self.read_fault_geometry(fault['Fault_Geometry'],
                                                  mesh_spacing)
        # optional weighted attributes are converted in place
        if fault['Shear_Modulus']:
            fault['Shear_Modulus'] = weight_list_to_tuple(
                fault['Shear_Modulus'], '%s Shear Modulus' % fault['ID'])
        if fault['Displacement_Length_Ratio']:
            fault['Displacement_Length_Ratio'] = weight_list_to_tuple(
                fault['Displacement_Length_Ratio'],
                '%s Displacement to Length Ratio' % fault['ID'])
        fault_source = mtkActiveFault(
            fault['ID'], fault['Fault_Name'], fault_geometry,
            weight_list_to_tuple(fault['Slip'], '%s - Slip' % fault['ID']),
            float(fault['Rake']), fault['Tectonic_Region'],
            float(fault['Aseismic']),
            weight_list_to_tuple(fault['Scaling_Relation_Sigma'],
                                 '%s Scaling_Relation_Sigma' % fault['ID']),
            neotectonic_fault=None,
            scale_rel=get_scaling_relation_tuple(
                fault['Magnitude_Scaling_Relation']),
            aspect_ratio=fault['Aspect_Ratio'],
            shear_modulus=fault['Shear_Modulus'],
            disp_length_ratio=fault['Displacement_Length_Ratio'])
        if tectonic_reg:
            # fill in regional defaults for undefined fault attributes
            fault_source.get_tectonic_regionalisation(
                tectonic_reg, fault['Tectonic_Region'])
        assert isinstance(fault['MFD_Model'], list)
        fault_source.generate_config_set(fault['MFD_Model'])
        model.faults.append(fault_source)
    return model, tectonic_reg
Reads the file and returns an instance of the FaultSource class .
584
14
232,010
def process_tectonic_regionalisation(self):
    """
    Processes the tectonic regionalisation from the yaml file; returns
    None when the input data contains no regionalisation.
    """
    if 'tectonic_regionalisation' not in self.data.keys():
        return None
    regionalisation = TectonicRegionalisation()
    regionalisation.populate_regions(
        parse_tect_region_dict_to_tuples(
            self.data['tectonic_regionalisation']))
    return regionalisation
Processes the tectonic regionalisation from the yaml file
103
13
232,011
def read_fault_geometry(self, geo_dict, mesh_spacing=1.0):
    """
    Creates the fault geometry from the parameters specified in the
    dictionary.

    :param dict geo_dict: fault geometry attributes; 'Fault_Typology'
        selects between 'Simple' and 'Complex'
    :param float mesh_spacing: fault mesh spacing (km)
    :returns: a SimpleFaultGeometry or ComplexFaultGeometry instance
    :raises ValueError: for any other typology
    """
    if geo_dict['Fault_Typology'] == 'Simple':
        # Simple fault geometry: the trace is a flat list of
        # alternating lon, lat pairs
        raw_trace = geo_dict['Fault_Trace']
        trace = Line([Point(raw_trace[ival], raw_trace[ival + 1])
                      for ival in range(0, len(raw_trace), 2)])
        geometry = SimpleFaultGeometry(trace, geo_dict['Dip'],
                                       geo_dict['Upper_Depth'],
                                       geo_dict['Lower_Depth'],
                                       mesh_spacing)
    elif geo_dict['Fault_Typology'] == 'Complex':
        # Complex Fault Typology: one edge per trace, each a flat list
        # of lon, lat, depth triples
        trace = []
        for raw_trace in geo_dict['Fault_Trace']:
            fault_edge = Line(
                [Point(raw_trace[ival], raw_trace[ival + 1],
                       raw_trace[ival + 2])
                 for ival in range(0, len(raw_trace), 3)])
            trace.append(fault_edge)
        geometry = ComplexFaultGeometry(trace, mesh_spacing)
    else:
        raise ValueError('Unrecognised or unsupported fault geometry!')
    return geometry
Creates the fault geometry from the parameters specified in the dictionary .
299
13
232,012
def _get_distance_scaling_term ( self , C , mag , rrup ) : return ( C [ "r1" ] + C [ "r2" ] * mag ) * np . log10 ( rrup + C [ "r3" ] )
Returns the distance scaling parameter
58
5
232,013
def _get_style_of_faulting_term ( self , C , rake ) : if rake > - 150.0 and rake <= - 30.0 : return C [ 'fN' ] elif rake > 30.0 and rake <= 150.0 : return C [ 'fR' ] else : return C [ 'fSS' ]
Returns the style of faulting term . Cauzzi et al . determined SOF from the plunge of the B- , T- and P-axes . For consistency with existing GMPEs the Wells & Coppersmith model is preferred
76
48
232,014
def _get_site_amplification_term ( self , C , vs30 ) : s_b , s_c , s_d = self . _get_site_dummy_variables ( vs30 ) return ( C [ "sB" ] * s_b ) + ( C [ "sC" ] * s_c ) + ( C [ "sD" ] * s_d )
Returns the site amplification term on the basis of Eurocode 8 site class
90
14
232,015
def _get_site_dummy_variables ( self , vs30 ) : s_b = np . zeros_like ( vs30 ) s_c = np . zeros_like ( vs30 ) s_d = np . zeros_like ( vs30 ) s_b [ np . logical_and ( vs30 >= 360. , vs30 < 800. ) ] = 1.0 s_c [ np . logical_and ( vs30 >= 180. , vs30 < 360. ) ] = 1.0 s_d [ vs30 < 180 ] = 1.0 return s_b , s_c , s_d
Returns the Eurocode 8 site class dummy variable
139
9
232,016
def get_recurrence(self, config):
    """
    Calculates the recurrence model for the given settings as an
    instance of openquake.hmtk.models.IncrementalMFD, storing it on
    self.recurrence; also sets self.magnitudes and self.max_mag.

    :param dict config: MFD model configuration ('Model_Name' selects
        the class from MFD_MAP)
    """
    model = MFD_MAP[config['Model_Name']]()
    model.setUp(config)
    model.get_mmax(config, self.msr, self.rake, self.area)
    # shift mmax by the chosen number of sigmas
    model.mmax = model.mmax + (self.msr_sigma * model.mmax_sigma)
    # As the Anderson & Luco arbitrary model requires the input of the
    # displacement to length ratio
    if 'AndersonLucoAreaMmax' in config['Model_Name']:
        if not self.disp_length_ratio:
            # If not defined then default to 1.25E-5
            self.disp_length_ratio = 1.25E-5
        min_mag, bin_width, occur_rates = model.get_mfd(
            self.slip, self.area, self.shear_modulus,
            self.disp_length_ratio)
    else:
        min_mag, bin_width, occur_rates = model.get_mfd(
            self.slip, self.area, self.shear_modulus)
    self.recurrence = IncrementalMFD(min_mag, bin_width, occur_rates)
    # bin centres: min_mag, min_mag + bin_width, ...
    self.magnitudes = min_mag + np.cumsum(
        bin_width * np.ones(len(occur_rates), dtype=float)) - bin_width
    self.max_mag = np.max(self.magnitudes)
Calculates the recurrence model for the given settings as an instance of the openquake . hmtk . models . IncrementalMFD
320
30
232,017
def get_tectonic_regionalisation(self, regionalisation, region_type=None):
    """
    Defines the tectonic region and updates the shear modulus, magnitude
    scaling relation and displacement to length ratio using the regional
    values, if not previously defined for the fault.

    :param regionalisation: object with ``key_list`` and parallel
        ``regionalisation`` lists
    :param region_type: optional region name overriding ``self.trt``
    :raises ValueError: when the region is not in the regionalisation
    """
    if region_type:
        self.trt = region_type
    # idiomatic membership test (was: `not self.trt in ...`)
    if self.trt not in regionalisation.key_list:
        raise ValueError('Tectonic region classification missing or '
                         'not defined in regionalisation')
    for iloc, key_val in enumerate(regionalisation.key_list):
        if self.trt in key_val:
            self.regionalisation = regionalisation.regionalisation[iloc]
            # Update undefined shear modulus from tectonic regionalisation
            if not self.shear_modulus:
                self.shear_modulus = self.regionalisation.shear_modulus
            # Update undefined scaling relation from tectonic
            # regionalisation
            if not self.msr:
                self.msr = self.regionalisation.scaling_rel
            # Update undefined displacement to length ratio from tectonic
            # regionalisation
            if not self.disp_length_ratio:
                self.disp_length_ratio = \
                    self.regionalisation.disp_length_ratio
            break
Defines the tectonic region and updates the shear modulus magnitude scaling relation and displacement to length ratio using the regional values if not previously defined for the fault
234
33
232,018
def select_catalogue(self, selector, distance, distance_metric="rupture",
                     upper_eq_depth=None, lower_eq_depth=None):
    """
    Select earthquakes within a specified distance of the fault,
    using either the rupture or the Joyner-Boore distance metric.
    """
    if selector.catalogue.get_number_events() < 1:
        raise ValueError('No events found in catalogue!')
    if 'rupture' in distance_metric:
        # rupture distance metric
        select = selector.within_rupture_distance
    else:
        # Joyner-Boore distance metric
        select = selector.within_joyner_boore_distance
    self.catalogue = select(self.geometry.surface, distance,
                            upper_depth=upper_eq_depth,
                            lower_depth=lower_eq_depth)
Select earthquakes within a specified distance of the fault
183
10
232,019
def generate_config_set(self, config):
    """
    Generates the list of (configuration, weight) pairs describing the
    magnitude frequency distributions of the fault.
    """
    if isinstance(config, dict):
        # a single configuration gets the full weight
        self.config = [(config, 1.0)]
    elif isinstance(config, list):
        # multiple configurations with corresponding weights
        running_total = 0.
        self.config = []
        for params in config:
            wgt = params['Model_Weight']
            running_total += wgt
            self.config.append((params, wgt))
        if fabs(running_total - 1.0) > 1E-7:
            raise ValueError('MFD config weights do not sum to 1.0 for '
                             'fault %s' % self.id)
    else:
        raise ValueError('MFD config must be input as dictionary or list!')
Generates a list of magnitude frequency distributions and renders as a tuple
177
13
232,020
def collapse_branches(self, mmin, bin_width, mmax):
    """
    Collapse the logic tree branches into a single IncrementalMFD by
    summing the weighted rates over a common magnitude grid.
    """
    master_mags = np.arange(mmin, mmax + (bin_width / 2.), bin_width)
    master_rates = np.zeros(len(master_mags), dtype=float)
    for model in self.mfd_models:
        # bins of the master grid covered by this model (with tolerance)
        idx = np.logical_and(
            master_mags >= np.min(model.magnitudes) - 1E-9,
            master_mags <= np.max(model.magnitudes) + 1E-9)
        # interpolate in log10-rate space, then fold back the weighted rates
        log_rates = np.log10(model.recurrence.occur_rates)
        interpolated = np.interp(master_mags[idx], model.magnitudes,
                                 log_rates)
        master_rates[idx] += model.weight * 10. ** interpolated
    return IncrementalMFD(mmin, bin_width, master_rates)
Collapse the logic tree branches into a single IncrementalMFD
229
13
232,021
def generate_fault_source_model(self):
    """
    Creates a resulting openquake.hmtk fault source set, one source per
    MFD model, together with the corresponding weights.

    :returns: (list of fault sources, list of model weights)
    """
    source_model = []
    model_weight = []
    for iloc in range(0, self.get_number_mfd_models()):
        # self.mfd appears to be a triple of parallel sequences:
        # [0] MFD objects, [1] weights, [2] per-model attribute
        # passed to the source constructor - TODO confirm
        model_mfd = EvenlyDiscretizedMFD(
            self.mfd[0][iloc].min_mag,
            self.mfd[0][iloc].bin_width,
            self.mfd[0][iloc].occur_rates.tolist())
        if isinstance(self.geometry, ComplexFaultGeometry):
            # Complex fault class
            source = mtkComplexFaultSource(
                self.id, self.name, self.trt, self.geometry.surface,
                self.mfd[2][iloc], self.rupt_aspect_ratio, model_mfd,
                self.rake)
            source.fault_edges = self.geometry.trace
        else:
            # Simple Fault source
            source = mtkSimpleFaultSource(
                self.id, self.name, self.trt, self.geometry.surface,
                self.geometry.dip, self.geometry.upper_depth,
                self.geometry.lower_depth, self.mfd[2][iloc],
                self.rupt_aspect_ratio, model_mfd, self.rake)
            source.fault_trace = self.geometry.trace
        source_model.append(source)
        model_weight.append(self.mfd[1][iloc])
    return source_model, model_weight
Creates a resulting openquake . hmtk fault source set .
336
15
232,022
def attrib(self):
    """General XML element attributes for a seismic source, as a dict."""
    return {
        'id': str(self.id),
        'name': str(self.name),
        'tectonicRegion': str(self.trt),
    }
General XML element attributes for a seismic source as a dict .
56
12
232,023
def attrib(self):
    """A dict of XML element attributes for this MFD."""
    return {
        'aValue': str(self.a_val),
        'bValue': str(self.b_val),
        'minMag': str(self.min_mag),
        'maxMag': str(self.max_mag),
    }
A dict of XML element attributes for this MFD .
77
11
232,024
def attrib(self):
    """A dict of XML element attributes for this NodalPlane."""
    return {
        'probability': str(self.probability),
        'strike': str(self.strike),
        'dip': str(self.dip),
        'rake': str(self.rake),
    }
A dict of XML element attributes for this NodalPlane .
68
14
232,025
def jbcorrelation(sites_or_distances, imt, vs30_clustering=False):
    """
    Returns the Jayaram-Baker correlation model
    exp(-3 * d / b), with the range b depending on period and
    vs30 clustering (formulae from page 1700 of the paper).
    """
    if hasattr(sites_or_distances, 'mesh'):
        distances = sites_or_distances.mesh.get_distance_matrix()
    else:
        distances = sites_or_distances
    period = imt.period
    if period >= 1:
        b = 22.0 + 3.7 * period     # both cases, eq. (19)
    elif vs30_clustering:
        b = 40.7 - 15.0 * period    # case 2, eq. (18)
    else:
        b = 8.5 + 17.2 * period     # case 1, eq. (17)
    # eq. (20)
    return numpy.exp((-3.0 / b) * distances)
Returns the Jayaram - Baker correlation model .
193
9
232,026
def hmcorrelation(sites_or_distances, imt, uncertainty_multiplier=0):
    """
    Returns the Heresi-Miranda correlation model
    exp(-(d / beta) ** 0.55); when an uncertainty multiplier is given,
    beta is sampled from a lognormal distribution.
    """
    if hasattr(sites_or_distances, 'mesh'):
        distances = sites_or_distances.mesh.get_distance_matrix()
    else:
        distances = sites_or_distances
    period = imt.period
    # Eq. (9): median of the correlation range beta
    if period < 1.37:
        med_beta = 4.231 * period * period - 5.180 * period + 13.392
    else:
        med_beta = 0.140 * period * period - 2.249 * period + 17.050
    # Eq. (10): dispersion of beta
    std_beta = 4.63e-3 * period * period + 0.028 * period + 0.713
    if uncertainty_multiplier == 0:
        beta = med_beta
    else:
        # non-deterministic: sample one realization of beta
        beta = numpy.random.lognormal(
            numpy.log(med_beta), std_beta * uncertainty_multiplier)
    # Eq. (8)
    return numpy.exp(-numpy.power(distances / beta, 0.55))
Returns the Heresi - Miranda correlation model .
250
10
232,027
def get_lower_triangle_correlation_matrix(self, sites, imt):
    """
    Get the lower-triangle matrix resulting from the Cholesky
    decomposition of the correlation matrix.
    """
    corma = self._get_correlation_matrix(sites, imt)
    return numpy.linalg.cholesky(corma)
Get lower - triangle matrix as a result of Cholesky - decomposition of correlation matrix .
51
19
232,028
def start_ebrisk(rupgetter, srcfilter, param, monitor):
    """Launcher for ebrisk tasks."""
    with monitor('weighting ruptures'):
        rupgetter.set_weights(srcfilter, param['num_taxonomies'])
    if rupgetter.weights.sum() <= param['maxweight']:
        # light enough: compute directly in this task
        yield ebrisk(rupgetter, srcfilter, param, monitor)
    else:
        # spawn one subtask per chunk of at most maxweight
        for chunk in rupgetter.split(param['maxweight']):
            yield ebrisk, chunk, srcfilter, param
Launcher for ebrisk tasks
127
7
232,029
def get_min_max_mag(self):
    """Return the minimum and maximum magnitudes (bin centres)."""
    min_mag, num_bins = self._get_min_mag_and_num_bins()
    max_mag = min_mag + self.bin_width * (num_bins - 1)
    return min_mag, max_mag
Return the minimum and maximum magnitudes
55
7
232,030
def _get_rate(self, mag):
    """Calculate and return the annual occurrence rate for a specific bin."""
    half_bin = self.bin_width / 2.0
    mag_lo = mag - half_bin
    mag_hi = mag + half_bin
    if self.min_mag <= mag < self.char_mag - DELTA_CHAR / 2:
        # rate according to the exponential (Gutenberg-Richter) part
        return (10 ** (self.a_val - self.b_val * mag_lo)
                - 10 ** (self.a_val - self.b_val * mag_hi))
    # characteristic rate, distributed uniformly over the characteristic
    # range, for the given bin width
    return (self.char_rate / DELTA_CHAR) * self.bin_width
Calculate and return the annual occurrence rate for a specific bin .
153
14
232,031
def _get_min_mag_and_num_bins(self):
    """
    Estimate the number of bins in the histogram and return it along
    with the first bin center value.
    """
    bw = self.bin_width
    # snap the edges to the bin grid, then move to the bin centres
    min_mag = round(self.min_mag / bw) * bw + bw / 2.0
    max_mag = round((self.char_mag + DELTA_CHAR / 2) / bw) * bw - bw / 2.0
    # here we use math round on the result of division and not just
    # cast it to integer because for some magnitude values that can't
    # be represented as an IEEE 754 double precisely the result can
    # look like 7.999999999999 which would become 7 instead of 8
    # being naively casted to int so we would lose the last bin.
    num_bins = int(round((max_mag - min_mag) / bw)) + 1
    return min_mag, num_bins
Estimate the number of bins in the histogram and return it along with the first bin center value .
211
21
232,032
def get_annual_occurrence_rates(self):
    """
    Calculate and return the annual occurrence rates histogram as a
    list of (magnitude, rate) pairs.
    """
    mag, num_bins = self._get_min_mag_and_num_bins()
    rates = []
    for _ in range(num_bins):
        rates.append((mag, self._get_rate(mag)))
        mag += self.bin_width
    return rates
Calculate and return the annual occurrence rates histogram .
83
12
232,033
def create_geometry ( self , input_geometry , upper_depth , lower_depth ) : self . _check_seismogenic_depths ( upper_depth , lower_depth ) # Check/create the geometry class if not isinstance ( input_geometry , Polygon ) : if not isinstance ( input_geometry , np . ndarray ) : raise ValueError ( 'Unrecognised or unsupported geometry ' 'definition' ) if np . shape ( input_geometry ) [ 0 ] < 3 : raise ValueError ( 'Incorrectly formatted polygon geometry -' ' needs three or more vertices' ) geometry = [ ] for row in input_geometry : geometry . append ( Point ( row [ 0 ] , row [ 1 ] , self . upper_depth ) ) self . geometry = Polygon ( geometry ) else : self . geometry = input_geometry
If geometry is defined as a numpy array then create instance of nhlib . geo . polygon . Polygon class otherwise if already instance of class accept class
189
33
232,034
def select_catalogue(self, selector, distance=None):
    """
    Selects the catalogue of earthquakes attributable to the source,
    warning when fewer than 5 events are found.
    """
    if selector.catalogue.get_number_events() < 1:
        raise ValueError('No events found in catalogue!')
    self.catalogue = selector.within_polygon(
        self.geometry, distance,
        upper_depth=self.upper_depth, lower_depth=self.lower_depth)
    if self.catalogue.get_number_events() < 5:
        # few data: the statistics for this source will be poor
        warnings.warn('Source %s (%s) has fewer than 5 events'
                      % (self.id, self.name))
Selects the catalogue of earthquakes attributable to the source
137
10
232,035
def new(self, operation='no operation', **kw):
    """Return a copy of the monitor usable for a different operation."""
    state = vars(self).copy()
    # fields that must not be inherited by the new monitor
    for field in ('operation', 'children', 'counts', '_flush'):
        del state[field]
    other = self.__class__(operation)
    vars(other).update(state)
    vars(other).update(kw)
    return other
Return a copy of the monitor usable for a different operation .
110
12
232,036
def from_shakemap(cls, shakemap_array):
    """Build a site collection from a shakemap array."""
    self = object.__new__(cls)
    self.complete = self
    n = len(shakemap_array)
    dtype = numpy.dtype([(p, site_param_dt[p])
                         for p in 'sids lon lat depth vs30'.split()])
    self.array = arr = numpy.zeros(n, dtype)
    arr['sids'] = numpy.arange(n, dtype=numpy.uint32)
    arr['depth'] = numpy.zeros(n)
    # copy the coordinates and vs30 straight from the shakemap
    for field in ('lon', 'lat', 'vs30'):
        arr[field] = shakemap_array[field]
    # the site collection is read-only
    arr.flags.writeable = False
    return self
Build a site collection from a shakemap array
198
9
232,037
def from_points(cls, lons, lats, depths=None, sitemodel=None,
                req_site_params=()):
    """
    Build the site collection from point coordinates plus, optionally,
    a site model (either an OqParam-like object with reference_*
    attributes or an array-like with named fields).

    :param lons: sequence of longitudes
    :param lats: sequence of latitudes
    :param depths: sequence of depths (defaults to zeros)
    :param sitemodel: None, an OqParam instance, or a structured array
    :param req_site_params: extra site parameter names to allocate
    """
    assert len(lons) < U32LIMIT, len(lons)
    if depths is None:
        depths = numpy.zeros(len(lons))
    assert len(lons) == len(lats) == len(depths), (
        len(lons), len(lats), len(depths))
    self = object.__new__(cls)
    self.complete = self
    # lon/lat are always present, so they are excluded from the extras
    req = ['sids', 'lon', 'lat', 'depth'] + sorted(
        par for par in req_site_params if par not in ('lon', 'lat'))
    if 'vs30' in req and 'vs30measured' not in req:
        req.append('vs30measured')
    self.dtype = numpy.dtype([(p, site_param_dt[p]) for p in req])
    self.array = arr = numpy.zeros(len(lons), self.dtype)
    arr['sids'] = numpy.arange(len(lons), dtype=numpy.uint32)
    arr['lon'] = fix_lon(numpy.array(lons))
    arr['lat'] = numpy.array(lats)
    arr['depth'] = numpy.array(depths)
    if sitemodel is None:
        pass
    elif hasattr(sitemodel, 'reference_vs30_value'):
        # sitemodel is actually an OqParam instance
        self._set('vs30', sitemodel.reference_vs30_value)
        self._set('vs30measured',
                  sitemodel.reference_vs30_type == 'measured')
        self._set('z1pt0', sitemodel.reference_depth_to_1pt0km_per_sec)
        self._set('z2pt5', sitemodel.reference_depth_to_2pt5km_per_sec)
        self._set('siteclass', sitemodel.reference_siteclass)
    else:
        # a structured array: copy every field except the coordinates
        for name in sitemodel.dtype.names:
            if name not in ('lon', 'lat'):
                self._set(name, sitemodel[name])
    return self
Build the site collection from
552
5
232,038
def make_complete(self):
    """Turns the site collection into a complete one, if needed."""
    # renumber the site IDs from 0 to N-1 and mark this collection as
    # its own complete collection
    self.array['sids'] = numpy.arange(len(self), dtype=numpy.uint32)
    self.complete = self
Turns the site collection into a complete one if needed
61
11
232,039
def filter(self, mask):
    """Create a SiteCollection with only a subset of sites."""
    assert len(mask) == len(self), (len(mask), len(self))
    if mask.all():
        # every site satisfies the filter: return this collection unchanged
        return self
    if not mask.any():
        # no site passes the filter
        return None
    indices, = mask.nonzero()  # positions of the True entries
    return self.filtered(indices)
Create a SiteCollection with only a subset of sites .
95
11
232,040
def point_at(self, horizontal_distance, vertical_increment, azimuth):
    """
    Compute the point with given horizontal and vertical distances and
    azimuth from this point.
    """
    lon, lat = geodetic.point_at(self.longitude, self.latitude,
                                 azimuth, horizontal_distance)
    depth = self.depth + vertical_increment
    return Point(lon, lat, depth)
Compute the point with given horizontal vertical distances and azimuth from this point .
68
17
232,041
def equally_spaced_points(self, point, distance):
    """
    Compute the set of points equally spaced between this point and
    the given point.
    """
    lons, lats, depths = geodetic.intervals_between(
        self.longitude, self.latitude, self.depth,
        point.longitude, point.latitude, point.depth, distance)
    return [Point(lon, lat, dep)
            for lon, lat, dep in zip(lons, lats, depths)]
Compute the set of points equally spaced between this point and the given point .
92
16
232,042
def to_polygon(self, radius):
    """
    Create a circular polygon with specified radius centered in the point.
    """
    assert radius > 0
    # avoid circular imports
    from openquake.hazardlib.geo.polygon import Polygon
    # an orthographic projection centered on this point maps it to
    # coordinates that are supposedly (0, 0)
    proj = geo_utils.OrthographicProjection(
        self.longitude, self.longitude, self.latitude, self.latitude)
    center = shapely.geometry.Point(*proj(self.longitude, self.latitude))
    # buffer() grows the projected point into a circle; wrap it back
    # into an openquake.hazardlib.geo.polygon.Polygon
    return Polygon._from_2d(center.buffer(radius), proj)
Create a circular polygon with specified radius centered in the point .
172
13
232,043
def closer_than(self, mesh, radius):
    """Check for proximity of points in the mesh."""
    # a mesh without depths is treated as being at the surface
    depths = 0 if mesh.depths is None else mesh.depths
    dists = geodetic.distance(self.longitude, self.latitude, self.depth,
                              mesh.lons, mesh.lats, depths)
    return dists <= radius
Check for proximity of points in the mesh .
61
9
232,044
def print_csm_info(fname):
    """
    Parse the composite source model without instantiating the sources
    and prints information about its composition and the full logic tree.

    :param fname: path to a job.ini file
    """
    oqparam = readinput.get_oqparam(fname)
    csm = readinput.get_composite_source_model(oqparam, in_memory=False)
    print(csm.info)
    print('See http://docs.openquake.org/oq-engine/stable/'
          'effective-realizations.html for an explanation')
    rlzs_assoc = csm.info.get_rlzs_assoc()
    print(rlzs_assoc)
    # report duplicated sources, if any
    dupl = [(srcs[0]['id'], len(srcs))
            for srcs in csm.check_dupl_sources()]
    if dupl:
        print(rst_table(dupl, ['source_id', 'multiplicity']))
    tot, pairs = get_pickled_sizes(rlzs_assoc)
    print(rst_table(pairs, ['attribute', 'nbytes']))
Parse the composite source model without instantiating the sources and prints information about its composition and the full logic tree
233
22
232,045
def do_build_reports(directory):
    """
    Walk the directory and builds pre-calculation reports for all the
    job.ini files found.
    """
    for cwd, dirs, files in os.walk(directory):
        for f in sorted(files):
            if f not in ('job.ini', 'job_h.ini', 'job_haz.ini',
                         'job_hazard.ini'):
                continue
            job_ini = os.path.join(cwd, f)
            logging.info(job_ini)
            try:
                reportwriter.build_report(job_ini, cwd)
            except Exception as e:
                # best-effort: log the failure and keep walking
                logging.error(str(e))
Walk the directory and builds pre - calculation reports for all the job . ini files found .
123
19
232,046
def info(calculators, gsims, views, exports, extracts, parameters,
         report, input_file=''):
    """
    Give information. You can pass the name of an available calculator,
    a job.ini file, or a zip archive with the input files.

    The boolean flags select which registries to list; input_file may be
    a directory (with report=True), an .xml, an .ini or a .zip file.
    """
    if calculators:
        for calc in sorted(base.calculators):
            print(calc)
    if gsims:
        for gs in gsim.get_available_gsims():
            print(gs)
    if views:
        for name in sorted(view):
            print(name)
    if exports:
        # group the exporters by datastore key
        dic = groupby(export, operator.itemgetter(0),
                      lambda group: [r[1] for r in group])
        n = 0
        for exporter, formats in dic.items():
            print(exporter, formats)
            n += len(formats)
        print('There are %d exporters defined.' % n)
    if extracts:
        for key in extract:
            func = extract[key]
            # show the signature of the underlying function, unwrapping
            # decorated callables
            if hasattr(func, '__wrapped__'):
                fm = FunctionMaker(func.__wrapped__)
            else:
                fm = FunctionMaker(func)
            print('%s(%s)%s' % (fm.name, fm.signature, fm.doc))
    if parameters:
        params = []
        for val in vars(OqParam).values():
            if hasattr(val, 'name'):
                params.append(val)
        params.sort(key=lambda x: x.name)
        for param in params:
            print(param.name)
    if os.path.isdir(input_file) and report:
        with Monitor('info', measuremem=True) as mon:
            with mock.patch.object(logging.root, 'info'):  # reduce logging
                do_build_reports(input_file)
        print(mon)
    elif input_file.endswith('.xml'):
        node = nrml.read(input_file)
        if node[0].tag.endswith('sourceModel'):
            if node['xmlns'].endswith('nrml/0.4'):
                raise InvalidFile(
                    '%s is in NRML 0.4 format, please run the following '
                    'command:\noq upgrade_nrml %s' % (
                        input_file, os.path.dirname(input_file) or '.'))
            print(source_model_info([node[0]]))
        elif node[0].tag.endswith('logicTree'):
            # collect and summarize all source models in the logic tree
            nodes = [nrml.read(sm_path)[0]
                     for sm_path in logictree.collect_info(
                         input_file).smpaths]
            print(source_model_info(nodes))
        else:
            print(node.to_str())
    elif input_file.endswith(('.ini', '.zip')):
        with Monitor('info', measuremem=True) as mon:
            if report:
                print('Generated', reportwriter.build_report(input_file))
            else:
                print_csm_info(input_file)
        if mon.duration > 1:
            print(mon)
    elif input_file:
        print("No info for '%s'" % input_file)
Give information . You can pass the name of an available calculator a job . ini file or a zip archive with the input files .
694
27
232,047
def classical_split_filter(srcs, srcfilter, gsims, params, monitor):
    """
    Split the given sources, filter the subsources and then compute the
    PoEs. Yield back subtasks if the split sources contain more than
    maxweight ruptures.
    """
    # first check if we are sampling the sources
    ss = int(os.environ.get('OQ_SAMPLE_SOURCES', 0))
    if ss:
        # debugging mode: work on a random sample of the split sources
        splits, stime = split_sources(srcs)
        srcs = readinput.random_filtered_sources(splits, srcfilter, ss)
        yield classical(srcs, srcfilter, gsims, params, monitor)
        return
    sources = []
    with monitor("filtering/splitting sources"):
        for src, _sites in srcfilter(srcs):
            if src.num_ruptures >= params['maxweight']:
                # heavy source: split it and keep the filtered pieces
                splits, stime = split_sources([src])
                sources.extend(srcfilter.filter(splits))
            else:
                sources.append(src)
    blocks = list(block_splitter(sources, params['maxweight'],
                                 operator.attrgetter('num_ruptures')))
    if blocks:
        # yield the first blocks (if any) and compute the last block in core
        # NB: the last block is usually the smallest one
        for block in blocks[:-1]:
            yield classical, block, srcfilter, gsims, params
        yield classical(blocks[-1], srcfilter, gsims, params, monitor)
Split the given sources filter the subsources and the compute the PoEs . Yield back subtasks if the split sources contain more than maxweight ruptures .
301
32
232,048
def get_azimuth_plunge(vect, degrees=True):
    """
    For a given vector in USE format retrieve the azimuth and plunge.
    """
    if vect[0] > 0:
        # flip the vector so that it points downward
        vect = -1. * np.copy(vect)
    horizontal = sqrt(vect[1] ** 2. + vect[2] ** 2.)
    plunge = atan2(-vect[0], horizontal)
    azimuth = atan2(vect[2], -vect[1])
    if not degrees:
        return azimuth % (2. * pi), plunge
    to_deg = 180. / pi
    return to_deg * azimuth % 360., to_deg * plunge
For a given vector in USE format retrieve the azimuth and plunge
147
14
232,049
def use_to_ned(tensor):
    """Converts a tensor in USE coordinate system to NED."""
    rotated = ROT_NED_USE.T * np.matrix(tensor) * ROT_NED_USE
    return np.array(rotated)
Converts a tensor in the USE coordinate system to NED
42
13
232,050
def ned_to_use(tensor):
    """Converts a tensor in NED coordinate system to USE."""
    rotated = ROT_NED_USE * np.matrix(tensor) * ROT_NED_USE.T
    return np.array(rotated)
Converts a tensor in the NED coordinate system to USE
43
13
232,051
def get_mean_and_stddevs ( self , sites , rup , dists , imt , stddev_types ) : mean , stddevs = super ( ) . get_mean_and_stddevs ( sites , rup , dists , imt , stddev_types ) # Defining frequency if imt == PGA ( ) : freq = 50.0 elif imt == PGV ( ) : freq = 2.0 else : freq = 1. / imt . period # Equation 3 of Atkinson (2010) x1 = np . min ( [ - 0.18 + 0.17 * np . log10 ( freq ) , 0 ] ) # Equation 4 a-b-c of Atkinson (2010) if rup . hypo_depth < 20.0 : x0 = np . max ( [ 0.217 - 0.321 * np . log10 ( freq ) , 0 ] ) elif rup . hypo_depth > 35.0 : x0 = np . min ( [ 0.263 + 0.0924 * np . log10 ( freq ) , 0.35 ] ) else : x0 = 0.2 # Limiting calculation distance to 1km # (as suggested by C. Bruce Worden) rjb = [ d if d > 1 else 1 for d in dists . rjb ] # Equation 2 and 5 of Atkinson (2010) mean += ( x0 + x1 * np . log10 ( rjb ) ) / np . log10 ( np . e ) return mean , stddevs
Using a frequency dependent correction for the mean ground motion . Standard deviation is fixed .
352
16
232,052
def get_rlz ( self , rlzstr ) : mo = re . match ( r'rlz-(\d+)' , rlzstr ) if not mo : return return self . realizations [ int ( mo . group ( 1 ) ) ]
r Get a Realization instance for a string of the form rlz - \ d +
56
19
232,053
def export ( datastore_key , calc_id = - 1 , exports = 'csv' , export_dir = '.' ) : dstore = util . read ( calc_id ) parent_id = dstore [ 'oqparam' ] . hazard_calculation_id if parent_id : dstore . parent = util . read ( parent_id ) dstore . export_dir = export_dir with performance . Monitor ( 'export' , measuremem = True ) as mon : for fmt in exports . split ( ',' ) : fnames = export_ ( ( datastore_key , fmt ) , dstore ) nbytes = sum ( os . path . getsize ( f ) for f in fnames ) print ( 'Exported %s in %s' % ( general . humansize ( nbytes ) , fnames ) ) if mon . duration > 1 : print ( mon ) dstore . close ( )
Export an output from the datastore .
199
9
232,054
def convert_UCERFSource ( self , node ) : dirname = os . path . dirname ( self . fname ) # where the source_model_file is source_file = os . path . join ( dirname , node [ "filename" ] ) if "startDate" in node . attrib and "investigationTime" in node . attrib : # Is a time-dependent model - even if rates were originally # poissonian # Verify that the source time span is the same as the TOM time span inv_time = float ( node [ "investigationTime" ] ) if inv_time != self . investigation_time : raise ValueError ( "Source investigation time (%s) is not " "equal to configuration investigation time " "(%s)" % ( inv_time , self . investigation_time ) ) start_date = datetime . strptime ( node [ "startDate" ] , "%d/%m/%Y" ) else : start_date = None return UCERFSource ( source_file , self . investigation_time , start_date , float ( node [ "minMag" ] ) , npd = self . convert_npdist ( node ) , hdd = self . convert_hpdist ( node ) , aspect = ~ node . ruptAspectRatio , upper_seismogenic_depth = ~ node . pointGeometry . upperSeismoDepth , lower_seismogenic_depth = ~ node . pointGeometry . lowerSeismoDepth , msr = valid . SCALEREL [ ~ node . magScaleRel ] ( ) , mesh_spacing = self . rupture_mesh_spacing , trt = node [ "tectonicRegion" ] )
Converts the Ucerf Source node into an SES Control object
367
14
232,055
def build_idx_set ( branch_id , start_date ) : code_set = branch_id . split ( "/" ) code_set . insert ( 3 , "Rates" ) idx_set = { "sec" : "/" . join ( [ code_set [ 0 ] , code_set [ 1 ] , "Sections" ] ) , "mag" : "/" . join ( [ code_set [ 0 ] , code_set [ 1 ] , code_set [ 2 ] , "Magnitude" ] ) } idx_set [ "rate" ] = "/" . join ( code_set ) idx_set [ "rake" ] = "/" . join ( [ code_set [ 0 ] , code_set [ 1 ] , "Rake" ] ) idx_set [ "msr" ] = "-" . join ( code_set [ : 3 ] ) idx_set [ "geol" ] = code_set [ 0 ] if start_date : # time-dependent source idx_set [ "grid_key" ] = "_" . join ( branch_id . replace ( "/" , "_" ) . split ( "_" ) [ : - 1 ] ) else : # time-independent source idx_set [ "grid_key" ] = branch_id . replace ( "/" , "_" ) idx_set [ "total_key" ] = branch_id . replace ( "/" , "|" ) return idx_set
Builds a dictionary of keys based on the branch code
327
11
232,056
def get_ridx ( self , iloc ) : with h5py . File ( self . source_file , "r" ) as hdf5 : return hdf5 [ self . idx_set [ "geol" ] + "/RuptureIndex" ] [ iloc ]
List of rupture indices for the given iloc
63
9
232,057
def get_background_sids ( self , src_filter ) : branch_key = self . idx_set [ "grid_key" ] idist = src_filter . integration_distance ( DEFAULT_TRT ) with h5py . File ( self . source_file , 'r' ) as hdf5 : bg_locations = hdf5 [ "Grid/Locations" ] . value distances = min_geodetic_distance ( src_filter . sitecol . xyz , ( bg_locations [ : , 0 ] , bg_locations [ : , 1 ] ) ) # Add buffer equal to half of length of median area from Mmax mmax_areas = self . msr . get_median_area ( hdf5 [ "/" . join ( [ "Grid" , branch_key , "MMax" ] ) ] . value , 0.0 ) # for instance hdf5['Grid/FM0_0_MEANFS_MEANMSR/MMax'] mmax_lengths = numpy . sqrt ( mmax_areas / self . aspect ) ok = distances <= ( 0.5 * mmax_lengths + idist ) # get list of indices from array of booleans return numpy . where ( ok ) [ 0 ] . tolist ( )
We can apply the filtering of the background sites as a pre - processing step - this is done here rather than in the sampling of the ruptures themselves
291
30
232,058
def iter_ruptures ( self ) : assert self . orig , '%s is not fully initialized' % self for ridx in range ( self . start , self . stop ) : if self . orig . rate [ ridx ] : # ruptures may have have zero rate rup = self . get_ucerf_rupture ( ridx , self . src_filter ) if rup : yield rup
Yield ruptures for the current set of indices
87
10
232,059
def get_background_sources ( self , src_filter , sample_factor = None ) : background_sids = self . get_background_sids ( src_filter ) if sample_factor is not None : # hack for use in the mosaic background_sids = random_filter ( background_sids , sample_factor , seed = 42 ) with h5py . File ( self . source_file , "r" ) as hdf5 : grid_loc = "/" . join ( [ "Grid" , self . idx_set [ "grid_key" ] ] ) # for instance Grid/FM0_0_MEANFS_MEANMSR_MeanRates mags = hdf5 [ grid_loc + "/Magnitude" ] . value mmax = hdf5 [ grid_loc + "/MMax" ] [ background_sids ] rates = hdf5 [ grid_loc + "/RateArray" ] [ background_sids , : ] locations = hdf5 [ "Grid/Locations" ] [ background_sids , : ] sources = [ ] for i , bg_idx in enumerate ( background_sids ) : src_id = "_" . join ( [ self . idx_set [ "grid_key" ] , str ( bg_idx ) ] ) src_name = "|" . join ( [ self . idx_set [ "total_key" ] , str ( bg_idx ) ] ) mag_idx = ( self . min_mag <= mags ) & ( mags < mmax [ i ] ) src_mags = mags [ mag_idx ] src_mfd = EvenlyDiscretizedMFD ( src_mags [ 0 ] , src_mags [ 1 ] - src_mags [ 0 ] , rates [ i , mag_idx ] . tolist ( ) ) ps = PointSource ( src_id , src_name , self . tectonic_region_type , src_mfd , self . mesh_spacing , self . msr , self . aspect , self . tom , self . usd , self . lsd , Point ( locations [ i , 0 ] , locations [ i , 1 ] ) , self . npd , self . hdd ) ps . id = self . id ps . src_group_id = self . src_group_id ps . num_ruptures = ps . count_ruptures ( ) sources . append ( ps ) return sources
Turn the background model of a given branch into a set of point sources
546
14
232,060
def split ( src , chunksize = MINWEIGHT ) : for i , block in enumerate ( block_splitter ( src . iter_ruptures ( ) , chunksize , key = operator . attrgetter ( 'mag' ) ) ) : rup = block [ 0 ] source_id = '%s:%d' % ( src . source_id , i ) amfd = mfd . ArbitraryMFD ( [ rup . mag ] , [ rup . mag_occ_rate ] ) rcs = RuptureCollectionSource ( source_id , src . name , src . tectonic_region_type , amfd , block ) yield rcs
Split a complex fault source in chunks
146
7
232,061
def get_bounding_box ( self , maxdist ) : locations = [ rup . hypocenter for rup in self . ruptures ] return get_bounding_box ( locations , maxdist )
Bounding box containing all the hypocenters enlarged by the maximum distance
44
14
232,062
def show_attrs ( key , calc_id = - 1 ) : ds = util . read ( calc_id ) try : attrs = h5py . File . __getitem__ ( ds . hdf5 , key ) . attrs except KeyError : print ( '%r is not in %s' % ( key , ds ) ) else : if len ( attrs ) == 0 : print ( '%s has no attributes' % key ) for name , value in attrs . items ( ) : print ( name , value ) finally : ds . close ( )
Show the attributes of a HDF5 dataset in the datastore .
127
15
232,063
def compare_mean_curves ( calc_ref , calc , nsigma = 3 ) : dstore_ref = datastore . read ( calc_ref ) dstore = datastore . read ( calc ) imtls = dstore_ref [ 'oqparam' ] . imtls if dstore [ 'oqparam' ] . imtls != imtls : raise RuntimeError ( 'The IMTs and levels are different between ' 'calculation %d and %d' % ( calc_ref , calc ) ) sitecol_ref = dstore_ref [ 'sitecol' ] sitecol = dstore [ 'sitecol' ] site_id_ref = { ( lon , lat ) : sid for sid , lon , lat in zip ( sitecol_ref . sids , sitecol_ref . lons , sitecol_ref . lats ) } site_id = { ( lon , lat ) : sid for sid , lon , lat in zip ( sitecol . sids , sitecol . lons , sitecol . lats ) } common = set ( site_id_ref ) & set ( site_id ) if not common : raise RuntimeError ( 'There are no common sites between calculation ' '%d and %d' % ( calc_ref , calc ) ) pmap_ref = PmapGetter ( dstore_ref , sids = [ site_id_ref [ lonlat ] for lonlat in common ] ) . get_mean ( ) pmap = PmapGetter ( dstore , sids = [ site_id [ lonlat ] for lonlat in common ] ) . get_mean ( ) for lonlat in common : mean , std = pmap [ site_id [ lonlat ] ] . array . T # shape (2, N) mean_ref , std_ref = pmap_ref [ site_id_ref [ lonlat ] ] . array . T err = numpy . sqrt ( std ** 2 + std_ref ** 2 ) for imt in imtls : sl = imtls ( imt ) ok = ( numpy . abs ( mean [ sl ] - mean_ref [ sl ] ) < nsigma * err [ sl ] ) . all ( ) if not ok : md = ( numpy . abs ( mean [ sl ] - mean_ref [ sl ] ) ) . max ( ) plt . title ( 'point=%s, imt=%s, maxdiff=%.2e' % ( lonlat , imt , md ) ) plt . loglog ( imtls [ imt ] , mean_ref [ sl ] + std_ref [ sl ] , label = str ( calc_ref ) , color = 'black' ) plt . loglog ( imtls [ imt ] , mean_ref [ sl ] - std_ref [ sl ] , color = 'black' ) plt . loglog ( imtls [ imt ] , mean [ sl ] + std [ sl ] , label = str ( calc ) , color = 'red' ) plt . loglog ( imtls [ imt ] , mean [ sl ] - std [ sl ] , color = 'red' ) plt . legend ( ) plt . show ( )
Compare the hazard curves coming from two different calculations .
715
10
232,064
def _get_stddevs ( self , sites , rup , C , stddev_types , ln_y_ref , exp1 , exp2 ) : ret = [ ] for stddev_type in stddev_types : assert stddev_type in self . DEFINED_FOR_STANDARD_DEVIATION_TYPES if stddev_type == const . StdDev . TOTAL : # eq. 13 ret . append ( 0.65 * np . ones_like ( sites . vs30 ) ) return ret
Returns the standard deviation which is fixed at 0 . 65 for every site
122
14
232,065
def build_imls ( ff , continuous_fragility_discretization , steps_per_interval = 0 ) : if ff . format == 'discrete' : imls = ff . imls if ff . nodamage and ff . nodamage < imls [ 0 ] : imls = [ ff . nodamage ] + imls if steps_per_interval > 1 : gen_imls = fine_graining ( imls , steps_per_interval ) else : gen_imls = imls else : # continuous gen_imls = numpy . linspace ( ff . minIML , ff . maxIML , continuous_fragility_discretization ) return gen_imls
Build intensity measure levels from a fragility function . If the function is continuous they are produced simply as a linear space between minIML and maxIML . If the function is discrete they are generated with a complex logic depending on the noDamageLimit and the parameter steps per interval .
159
57
232,066
def insured_loss_curve ( curve , deductible , insured_limit ) : losses , poes = curve [ : , curve [ 0 ] <= insured_limit ] limit_poe = interpolate . interp1d ( * curve , bounds_error = False , fill_value = 1 ) ( deductible ) return numpy . array ( [ losses , numpy . piecewise ( poes , [ poes > limit_poe ] , [ limit_poe , lambda x : x ] ) ] )
Compute an insured loss ratio curve given a loss ratio curve
108
12
232,067
def bcr ( eal_original , eal_retrofitted , interest_rate , asset_life_expectancy , asset_value , retrofitting_cost ) : return ( ( eal_original - eal_retrofitted ) * asset_value * ( 1 - numpy . exp ( - interest_rate * asset_life_expectancy ) ) / ( interest_rate * retrofitting_cost ) )
Compute the Benefit - Cost Ratio .
92
8
232,068
def pairwise_mean ( values ) : return numpy . array ( [ numpy . mean ( pair ) for pair in pairwise ( values ) ] )
Averages between a value and the next value in a sequence
33
12
232,069
def pairwise_diff ( values ) : return numpy . array ( [ x - y for x , y in pairwise ( values ) ] )
Differences between a value and the next value in a sequence
31
12
232,070
def mean_std ( fractions ) : n = fractions . shape [ 0 ] if n == 1 : # avoid warnings when computing the stddev return fractions [ 0 ] , numpy . ones_like ( fractions [ 0 ] ) * numpy . nan return numpy . mean ( fractions , axis = 0 ) , numpy . std ( fractions , axis = 0 , ddof = 1 )
Given an N x M matrix returns mean and std computed on the rows i . e . two M - dimensional vectors .
82
24
232,071
def broadcast ( func , composite_array , * args ) : dic = { } dtypes = [ ] for name in composite_array . dtype . names : dic [ name ] = func ( composite_array [ name ] , * args ) dtypes . append ( ( name , dic [ name ] . dtype ) ) res = numpy . zeros ( dic [ name ] . shape , numpy . dtype ( dtypes ) ) for name in dic : res [ name ] = dic [ name ] return res
Broadcast an array function over a composite array
115
9
232,072
def average_loss ( lc ) : losses , poes = ( lc [ 'loss' ] , lc [ 'poe' ] ) if lc . dtype . names else lc return - pairwise_diff ( losses ) @ pairwise_mean ( poes )
Given a loss curve array with poe and loss fields computes the average loss on a period of time .
61
22
232,073
def normalize_curves_eb ( curves ) : # we assume non-decreasing losses, so losses[-1] is the maximum loss non_zero_curves = [ ( losses , poes ) for losses , poes in curves if losses [ - 1 ] > 0 ] if not non_zero_curves : # no damage. all zero curves return curves [ 0 ] [ 0 ] , numpy . array ( [ poes for _losses , poes in curves ] ) else : # standard case max_losses = [ losses [ - 1 ] for losses , _poes in non_zero_curves ] reference_curve = non_zero_curves [ numpy . argmax ( max_losses ) ] loss_ratios = reference_curve [ 0 ] curves_poes = [ interpolate . interp1d ( losses , poes , bounds_error = False , fill_value = 0 ) ( loss_ratios ) for losses , poes in curves ] # fix degenerated case with flat curve for cp in curves_poes : if numpy . isnan ( cp [ 0 ] ) : cp [ 0 ] = 0 return loss_ratios , numpy . array ( curves_poes )
A more sophisticated version of normalize_curves used in the event based calculator .
268
17
232,074
def sample ( self , means , covs , idxs , epsilons = None ) : if epsilons is None : return means self . set_distribution ( epsilons ) res = self . distribution . sample ( means , covs , means * covs , idxs ) return res
Sample the epsilons and apply the corrections to the means . This method is called only if there are nonzero covs .
66
27
232,075
def mean_loss_ratios_with_steps ( self , steps ) : loss_ratios = self . mean_loss_ratios if min ( loss_ratios ) > 0.0 : # prepend with a zero loss_ratios = numpy . concatenate ( [ [ 0.0 ] , loss_ratios ] ) if max ( loss_ratios ) < 1.0 : # append a 1.0 loss_ratios = numpy . concatenate ( [ loss_ratios , [ 1.0 ] ] ) return fine_graining ( loss_ratios , steps )
Split the mean loss ratios producing a new set of loss ratios . The new set of loss ratios always includes 0 . 0 and 1 . 0
131
28
232,076
def sample ( self , probs , _covs , idxs , epsilons ) : self . set_distribution ( epsilons ) return self . distribution . sample ( self . loss_ratios , probs )
Sample the . loss_ratios with the given probabilities .
51
12
232,077
def build ( self , continuous_fragility_discretization , steps_per_interval ) : newfm = copy . copy ( self ) for key , ffl in self . items ( ) : newfm [ key ] = ffl . build ( self . limitStates , continuous_fragility_discretization , steps_per_interval ) return newfm
Return a new FragilityModel instance in which the values have been replaced with FragilityFunctionList instances .
80
21
232,078
def compute_gmfs ( rupgetter , srcfilter , param , monitor ) : getter = GmfGetter ( rupgetter , srcfilter , param [ 'oqparam' ] ) with monitor ( 'getting ruptures' ) : getter . init ( ) return getter . compute_gmfs_curves ( monitor )
Compute GMFs and optionally hazard curves
75
8
232,079
def _get_minmax_edges ( self , edge ) : if isinstance ( edge , Line ) : # For instance of line class need to loop over values depth_vals = np . array ( [ node . depth for node in edge . points ] ) else : depth_vals = edge [ : , 2 ] temp_upper_depth = np . min ( depth_vals ) if not self . upper_depth : self . upper_depth = temp_upper_depth else : if temp_upper_depth < self . upper_depth : self . upper_depth = temp_upper_depth temp_lower_depth = np . max ( depth_vals ) if not self . lower_depth : self . lower_depth = temp_lower_depth else : if temp_lower_depth > self . lower_depth : self . lower_depth = temp_lower_depth
Updates the upper and lower depths based on the input edges
185
12
232,080
def _get_magnitude_term ( self , C , mag ) : if mag >= self . CONSTS [ "Mh" ] : return C [ "e1" ] + C [ "b3" ] * ( mag - self . CONSTS [ "Mh" ] ) else : return C [ "e1" ] + ( C [ "b1" ] * ( mag - self . CONSTS [ "Mh" ] ) ) + ( C [ "b2" ] * ( mag - self . CONSTS [ "Mh" ] ) ** 2. )
Returns the magnitude scaling term - equation 3
129
8
232,081
def _get_distance_term ( self , C , rjb , mag ) : c_3 = self . _get_anelastic_coeff ( C ) rval = np . sqrt ( rjb ** 2. + C [ "h" ] ** 2. ) return ( C [ "c1" ] + C [ "c2" ] * ( mag - self . CONSTS [ "Mref" ] ) ) * np . log ( rval / self . CONSTS [ "Rref" ] ) + c_3 * ( rval - self . CONSTS [ "Rref" ] )
Returns the general distance scaling term - equation 2
137
9
232,082
def _get_site_term ( self , C , vs30 ) : dg1 , dg2 = self . _get_regional_site_term ( C ) return ( C [ "g1" ] + dg1 ) + ( C [ "g2" ] + dg2 ) * np . log ( vs30 )
Returns only a linear site amplification term
74
7
232,083
def _get_stddevs ( self , C , stddev_types , num_sites ) : assert all ( stddev_type in self . DEFINED_FOR_STANDARD_DEVIATION_TYPES for stddev_type in stddev_types ) stddevs = [ np . zeros ( num_sites ) + C [ 'SigmaTot' ] for _ in stddev_types ] return stddevs
Return standard deviations as defined in tables below
104
8
232,084
def _compute_mean ( self , C , mag , rrup , hypo_depth , delta_R , delta_S , delta_V , delta_I , vs30 ) : # mean is calculated for all the 4 classes using the same equation. # For DowrickRhoades2005SSlab, the coefficients which don't appear in # Model 3 equationare assigned to zero mean = ( C [ 'A1' ] + ( C [ 'A2' ] + C [ 'A2R' ] * delta_R + C [ 'A2V' ] * delta_V ) * mag + ( C [ 'A3' ] + C [ 'A3S' ] * delta_S + C [ 'A3V' ] * delta_V ) * np . log10 ( np . power ( ( rrup ** 3 + C [ 'd' ] ** 3 ) , 1.0 / 3.0 ) ) + C [ 'A4' ] * hypo_depth + C [ 'A5' ] * delta_I ) # Get S site class term S = self . _get_site_class ( vs30 , mean ) # Add S amplification term to mean value mean = mean + S return mean
Compute MMI Intensity Value as per Equation in Table 5 and Table 7 pag 198 .
263
20
232,085
def _get_stddevs ( self , C , stddev_types , num_sites ) : # interevent stddev sigma_inter = C [ 'tau' ] + np . zeros ( num_sites ) # intraevent std sigma_intra = C [ 'sigma' ] + np . zeros ( num_sites ) std = [ ] for stddev_type in stddev_types : if stddev_type == const . StdDev . TOTAL : # equation in section 5.2 page 200 std += [ np . sqrt ( sigma_intra ** 2 + sigma_inter ** 2 ) ] elif stddev_type == const . StdDev . INTRA_EVENT : std . append ( sigma_intra ) elif stddev_type == const . StdDev . INTER_EVENT : std . append ( sigma_inter ) return std
Return total standard deviation as described in paragraph 5 . 2 pag 200 .
205
14
232,086
def plot_assets ( calc_id = - 1 , site_model = False ) : # NB: matplotlib is imported inside since it is a costly import import matplotlib . pyplot as p from openquake . hmtk . plotting . patch import PolygonPatch dstore = util . read ( calc_id ) try : region = dstore [ 'oqparam' ] . region except KeyError : region = None sitecol = dstore [ 'sitecol' ] try : assetcol = dstore [ 'assetcol' ] . value except AttributeError : assetcol = dstore [ 'assetcol' ] . array fig = p . figure ( ) ax = fig . add_subplot ( 111 ) if region : pp = PolygonPatch ( shapely . wkt . loads ( region ) , alpha = 0.1 ) ax . add_patch ( pp ) ax . grid ( True ) if site_model and 'site_model' in dstore : sm = dstore [ 'site_model' ] sm_lons , sm_lats = sm [ 'lon' ] , sm [ 'lat' ] if len ( sm_lons ) > 1 and cross_idl ( * sm_lons ) : sm_lons %= 360 p . scatter ( sm_lons , sm_lats , marker = '.' , color = 'orange' ) p . scatter ( sitecol . complete . lons , sitecol . complete . lats , marker = '.' , color = 'gray' ) p . scatter ( assetcol [ 'lon' ] , assetcol [ 'lat' ] , marker = '.' , color = 'green' ) p . scatter ( sitecol . lons , sitecol . lats , marker = '+' , color = 'black' ) if 'discarded' in dstore : disc = numpy . unique ( dstore [ 'discarded' ] . value [ [ 'lon' , 'lat' ] ] ) p . scatter ( disc [ 'lon' ] , disc [ 'lat' ] , marker = 'x' , color = 'red' ) p . show ( )
Plot the sites and the assets
462
6
232,087
def _get_adjustment ( mag , year , mmin , completeness_year , t_f , mag_inc = 0.1 ) : if len ( completeness_year ) == 1 : if ( mag >= mmin ) and ( year >= completeness_year [ 0 ] ) : # No adjustment needed - event weight == 1 return 1.0 else : # Event should not be counted return False kval = int ( ( ( mag - mmin ) / mag_inc ) ) + 1 if ( kval >= 1 ) and ( year >= completeness_year [ kval - 1 ] ) : return t_f else : return False
If the magnitude is greater than the minimum in the completeness table and the year is greater than the corresponding completeness year then return the Weichert factor
137
32
232,088
def get_catalogue_bounding_polygon ( catalogue ) : upper_lon = np . max ( catalogue . data [ 'longitude' ] ) upper_lat = np . max ( catalogue . data [ 'latitude' ] ) lower_lon = np . min ( catalogue . data [ 'longitude' ] ) lower_lat = np . min ( catalogue . data [ 'latitude' ] ) return Polygon ( [ Point ( lower_lon , upper_lat ) , Point ( upper_lon , upper_lat ) , Point ( upper_lon , lower_lat ) , Point ( lower_lon , lower_lat ) ] )
Returns a polygon containing the bounding box of the catalogue
137
12
232,089
def make_from_catalogue ( cls , catalogue , spacing , dilate ) : new = cls ( ) cat_bbox = get_catalogue_bounding_polygon ( catalogue ) if dilate > 0 : cat_bbox = cat_bbox . dilate ( dilate ) # Define Grid spacing new . update ( { 'xmin' : np . min ( cat_bbox . lons ) , 'xmax' : np . max ( cat_bbox . lons ) , 'xspc' : spacing , 'ymin' : np . min ( cat_bbox . lats ) , 'ymax' : np . max ( cat_bbox . lats ) , 'yspc' : spacing , 'zmin' : 0. , 'zmax' : np . max ( catalogue . data [ 'depth' ] ) , 'zspc' : np . max ( catalogue . data [ 'depth' ] ) } ) if new [ 'zmin' ] == new [ 'zmax' ] == new [ 'zspc' ] == 0 : new [ 'zmax' ] = new [ 'zspc' ] = 1 return new
Defines the grid on the basis of the catalogue
258
10
232,090
def write_to_csv ( self , filename ) : fid = open ( filename , 'wt' ) # Create header list header_info = [ 'Longitude' , 'Latitude' , 'Depth' , 'Observed Count' , 'Smoothed Rate' , 'b-value' ] writer = csv . DictWriter ( fid , fieldnames = header_info ) headers = dict ( ( name0 , name0 ) for name0 in header_info ) # Write to file writer . writerow ( headers ) for row in self . data : # institute crude compression by omitting points with no seismicity # and taking advantage of the %g format if row [ 4 ] == 0 : continue row_dict = { 'Longitude' : '%g' % row [ 0 ] , 'Latitude' : '%g' % row [ 1 ] , 'Depth' : '%g' % row [ 2 ] , 'Observed Count' : '%d' % row [ 3 ] , 'Smoothed Rate' : '%.6g' % row [ 4 ] , 'b-value' : '%g' % self . bval } writer . writerow ( row_dict ) fid . close ( )
Exports to simple csv
262
6
232,091
def _validate_hazard_metadata ( md ) : if ( md . get ( 'statistics' ) is not None and ( md . get ( 'smlt_path' ) is not None or md . get ( 'gsimlt_path' ) is not None ) ) : raise ValueError ( 'Cannot specify both `statistics` and logic tree ' 'paths' ) if md . get ( 'statistics' ) is not None : # make sure only valid statistics types are specified if md . get ( 'statistics' ) not in ( 'mean' , 'max' , 'quantile' , 'std' ) : raise ValueError ( '`statistics` must be either `mean`, `max`, or ' '`quantile`' ) else : # must specify both logic tree paths if md . get ( 'smlt_path' ) is None or md . get ( 'gsimlt_path' ) is None : raise ValueError ( 'Both logic tree paths are required for ' 'non-statistical results' ) if md . get ( 'statistics' ) == 'quantile' : if md . get ( 'quantile_value' ) is None : raise ValueError ( 'quantile stastics results require a quantile' ' value to be specified' ) if not md . get ( 'statistics' ) == 'quantile' : if md . get ( 'quantile_value' ) is not None : raise ValueError ( 'Quantile value must be specified with ' 'quantile statistics' ) if md . get ( 'imt' ) == 'SA' : if md . get ( 'sa_period' ) is None : raise ValueError ( '`sa_period` is required for IMT == `SA`' ) if md . get ( 'sa_damping' ) is None : raise ValueError ( '`sa_damping` is required for IMT == `SA`' )
Validate metadata dict of attributes which are more or less the same for hazard curves hazard maps and disaggregation histograms .
416
24
232,092
def _set_metadata ( element , metadata , attr_map , transform = str ) : for kw , attr in attr_map . items ( ) : value = metadata . get ( kw ) if value is not None : element . set ( attr , transform ( value ) )
Set metadata attributes on a given element .
63
8
232,093
def serialize ( self , data ) : with open ( self . dest , 'wb' ) as fh : root = et . Element ( 'nrml' ) self . add_hazard_curves ( root , self . metadata , data ) nrml . write ( list ( root ) , fh )
Write a sequence of hazard curves to the specified file .
66
11
232,094
def add_hazard_curves ( self , root , metadata , data ) : hazard_curves = et . SubElement ( root , 'hazardCurves' ) _set_metadata ( hazard_curves , metadata , _ATTR_MAP ) imls_elem = et . SubElement ( hazard_curves , 'IMLs' ) imls_elem . text = ' ' . join ( map ( scientificformat , metadata [ 'imls' ] ) ) gml_ns = nrml . SERIALIZE_NS_MAP [ 'gml' ] for hc in data : hc_elem = et . SubElement ( hazard_curves , 'hazardCurve' ) gml_point = et . SubElement ( hc_elem , '{%s}Point' % gml_ns ) gml_pos = et . SubElement ( gml_point , '{%s}pos' % gml_ns ) gml_pos . text = '%s %s' % ( hc . location . x , hc . location . y ) poes_elem = et . SubElement ( hc_elem , 'poEs' ) poes_elem . text = ' ' . join ( map ( scientificformat , hc . poes ) )
Add hazard curves stored into data as child of the root element with metadata . See the documentation of the method serialize and the constructor for a description of data and metadata respectively .
285
35
232,095
def serialize ( self , data , fmt = '%10.7E' ) : gmf_set_nodes = [ ] for gmf_set in data : gmf_set_node = Node ( 'gmfSet' ) if gmf_set . investigation_time : gmf_set_node [ 'investigationTime' ] = str ( gmf_set . investigation_time ) gmf_set_node [ 'stochasticEventSetId' ] = str ( gmf_set . stochastic_event_set_id ) gmf_set_node . nodes = gen_gmfs ( gmf_set ) gmf_set_nodes . append ( gmf_set_node ) gmf_container = Node ( 'gmfCollection' ) gmf_container [ SM_TREE_PATH ] = self . sm_lt_path gmf_container [ GSIM_TREE_PATH ] = self . gsim_lt_path gmf_container . nodes = gmf_set_nodes with open ( self . dest , 'wb' ) as dest : nrml . write ( [ gmf_container ] , dest , fmt )
Serialize a collection of ground motion fields to XML .
275
11
232,096
def serialize ( self , data , investigation_time ) : with open ( self . dest , 'wb' ) as fh : root = et . Element ( 'nrml' ) ses_container = et . SubElement ( root , 'ruptureCollection' ) ses_container . set ( 'investigationTime' , str ( investigation_time ) ) for grp_id in sorted ( data ) : attrs = dict ( id = grp_id , tectonicRegion = data [ grp_id ] [ 0 ] . tectonic_region_type ) sg = et . SubElement ( ses_container , 'ruptureGroup' , attrs ) for rupture in data [ grp_id ] : rupture_to_element ( rupture , sg ) nrml . write ( list ( root ) , fh )
Serialize a collection of stochastic event sets to XML .
182
13
232,097
def serialize ( self , data ) : with open ( self . dest , 'wb' ) as fh : root = et . Element ( 'nrml' ) hazard_map = et . SubElement ( root , 'hazardMap' ) _set_metadata ( hazard_map , self . metadata , _ATTR_MAP ) for lon , lat , iml in data : node = et . SubElement ( hazard_map , 'node' ) node . set ( 'lon' , str ( lon ) ) node . set ( 'lat' , str ( lat ) ) node . set ( 'iml' , str ( iml ) ) nrml . write ( list ( root ) , fh )
Serialize hazard map data to XML .
152
8
232,098
def serialize ( self , data ) : gml_ns = nrml . SERIALIZE_NS_MAP [ 'gml' ] with open ( self . dest , 'wb' ) as fh : root = et . Element ( 'nrml' ) uh_spectra = et . SubElement ( root , 'uniformHazardSpectra' ) _set_metadata ( uh_spectra , self . metadata , _ATTR_MAP ) periods_elem = et . SubElement ( uh_spectra , 'periods' ) periods_elem . text = ' ' . join ( [ str ( x ) for x in self . metadata [ 'periods' ] ] ) for uhs in data : uhs_elem = et . SubElement ( uh_spectra , 'uhs' ) gml_point = et . SubElement ( uhs_elem , '{%s}Point' % gml_ns ) gml_pos = et . SubElement ( gml_point , '{%s}pos' % gml_ns ) gml_pos . text = '%s %s' % ( uhs . location . x , uhs . location . y ) imls_elem = et . SubElement ( uhs_elem , 'IMLs' ) imls_elem . text = ' ' . join ( [ '%10.7E' % x for x in uhs . imls ] ) nrml . write ( list ( root ) , fh )
Write a sequence of uniform hazard spectra to the specified file .
332
13
232,099
def check_config ( config , data ) : essential_keys = [ 'input_mmin' , 'b-value' , 'sigma-b' ] for key in essential_keys : if not key in config . keys ( ) : raise ValueError ( 'For KijkoSellevolBayes the key %s needs to ' 'be set in the configuation' % key ) if 'tolerance' not in config . keys ( ) or not config [ 'tolerance' ] : config [ 'tolerance' ] = 1E-5 if not config . get ( 'maximum_iterations' , False ) : config [ 'maximum_iterations' ] = 1000 if config [ 'input_mmin' ] < np . min ( data [ 'magnitude' ] ) : config [ 'input_mmin' ] = np . min ( data [ 'magnitude' ] ) if fabs ( config [ 'sigma-b' ] < 1E-15 ) : raise ValueError ( 'Sigma-b must be greater than zero!' ) return config
Check config file inputs
231
4