idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
44,100
def hmcorrelation(sites_or_distances, imt, uncertainty_multiplier=0):
    """
    Correlation model of Heresi & Miranda.

    :param sites_or_distances: a site collection (with a ``mesh``
        attribute) or a precomputed distance matrix
    :param imt: an intensity measure type with a ``period`` attribute
    :param uncertainty_multiplier: if nonzero, the beta parameter is
        sampled from a lognormal distribution instead of using its
        median value
    :returns: a matrix of correlation coefficients
    """
    if hasattr(sites_or_distances, 'mesh'):
        dmatrix = sites_or_distances.mesh.get_distance_matrix()
    else:
        dmatrix = sites_or_distances
    T = imt.period
    # period-dependent median and dispersion of the beta parameter
    if T < 1.37:
        med_beta = 4.231 * T * T - 5.180 * T + 13.392
    else:
        med_beta = 0.140 * T * T - 2.249 * T + 17.050
    std_beta = 4.63e-3 * T * T + 0.028 * T + 0.713
    if uncertainty_multiplier == 0:
        beta = med_beta
    else:
        beta = numpy.random.lognormal(
            numpy.log(med_beta), std_beta * uncertainty_multiplier)
    return numpy.exp(-numpy.power(dmatrix / beta, 0.55))
Returns the Heresi-Miranda correlation model.
44,101
def get_lower_triangle_correlation_matrix(self, sites, imt):
    """
    Get the lower-triangular matrix resulting from the Cholesky
    decomposition of the correlation matrix for the given sites and
    intensity measure type.
    """
    corma = self._get_correlation_matrix(sites, imt)
    return numpy.linalg.cholesky(corma)
Get lower - triangle matrix as a result of Cholesky - decomposition of correlation matrix .
44,102
def start_ebrisk(rupgetter, srcfilter, param, monitor):
    """
    Launcher for ebrisk tasks: run directly when the ruptures are light
    enough, otherwise yield one subtask per chunk of ruptures.
    """
    with monitor('weighting ruptures'):
        rupgetter.set_weights(srcfilter, param['num_taxonomies'])
    if rupgetter.weights.sum() <= param['maxweight']:
        # light enough: compute here
        yield ebrisk(rupgetter, srcfilter, param, monitor)
    else:
        # too heavy: spawn subtasks
        for rgetter in rupgetter.split(param['maxweight']):
            yield ebrisk, rgetter, srcfilter, param
Launcher for ebrisk tasks
44,103
def get_min_max_mag(self):
    "Return the minimum and maximum magnitudes"
    min_mag, num_bins = self._get_min_mag_and_num_bins()
    max_mag = min_mag + self.bin_width * (num_bins - 1)
    return min_mag, max_mag
Return the minimum and maximum magnitudes
44,104
def _get_rate(self, mag):
    """
    Calculate and return the annual occurrence rate for a specific bin.

    :param mag: magnitude value corresponding to the center of the bin
    :returns: the occurrence rate of the bin
    """
    mag_lo = mag - self.bin_width / 2.0
    mag_hi = mag + self.bin_width / 2.0
    in_gr_part = (mag >= self.min_mag
                  and mag < self.char_mag - DELTA_CHAR / 2)
    if in_gr_part:
        # Gutenberg-Richter part of the magnitude-frequency distribution
        return (10 ** (self.a_val - self.b_val * mag_lo)
                - 10 ** (self.a_val - self.b_val * mag_hi))
    # characteristic part of the distribution
    return (self.char_rate / DELTA_CHAR) * self.bin_width
Calculate and return the annual occurrence rate for a specific bin .
44,105
def _get_min_mag_and_num_bins(self):
    """
    Estimate the number of bins in the histogram and return it along
    with the first bin center value.
    """
    # snap both ends of the range to multiples of the bin width
    min_mag = round(self.min_mag / self.bin_width) * self.bin_width
    max_mag = (round((self.char_mag + DELTA_CHAR / 2) / self.bin_width)
               * self.bin_width)
    # move from bin edges to bin centers
    min_mag += self.bin_width / 2.0
    max_mag -= self.bin_width / 2.0
    num_bins = int(round((max_mag - min_mag) / self.bin_width)) + 1
    return min_mag, num_bins
Estimate the number of bins in the histogram and return it along with the first bin center value .
44,106
def get_annual_occurrence_rates(self):
    """
    Calculate and return the annual occurrence rates histogram as a
    list of ``(mag, rate)`` pairs, one per bin.
    """
    mag, num_bins = self._get_min_mag_and_num_bins()
    rates = []
    for _ in range(num_bins):
        rates.append((mag, self._get_rate(mag)))
        mag += self.bin_width
    return rates
Calculate and return the annual occurrence rates histogram .
44,107
def create_geometry(self, input_geometry, upper_depth, lower_depth):
    """
    If geometry is defined as a numpy array then create an instance of
    the nhlib.geo.polygon.Polygon class; otherwise, if it is already an
    instance of the class, accept it as-is.
    """
    self._check_seismogenic_depths(upper_depth, lower_depth)
    if isinstance(input_geometry, Polygon):
        self.geometry = input_geometry
        return
    if not isinstance(input_geometry, np.ndarray):
        raise ValueError('Unrecognised or unsupported geometry '
                         'definition')
    if np.shape(input_geometry)[0] < 3:
        raise ValueError('Incorrectly formatted polygon geometry -'
                         ' needs three or more vertices')
    vertices = [Point(row[0], row[1], self.upper_depth)
                for row in input_geometry]
    self.geometry = Polygon(vertices)
If geometry is defined as a numpy array then create instance of nhlib . geo . polygon . Polygon class otherwise if already instance of class accept class
44,108
def select_catalogue(self, selector, distance=None):
    """
    Select the catalogue of earthquakes attributable to the source.
    """
    if selector.catalogue.get_number_events() < 1:
        raise ValueError('No events found in catalogue!')
    self.catalogue = selector.within_polygon(
        self.geometry, distance,
        upper_depth=self.upper_depth, lower_depth=self.lower_depth)
    # a very small catalogue is only warned about, not rejected
    if self.catalogue.get_number_events() < 5:
        warnings.warn('Source %s (%s) has fewer than 5 events'
                      % (self.id, self.name))
Selects the catalogue of earthquakes attributable to the source
44,109
def new(self, operation='no operation', **kw):
    """
    Return a copy of the monitor usable for a different operation.
    """
    # copy everything except the per-operation bookkeeping state
    attrs = vars(self).copy()
    for name in ('operation', 'children', 'counts', '_flush'):
        del attrs[name]
    other = self.__class__(operation)
    vars(other).update(attrs)
    vars(other).update(kw)
    return other
Return a copy of the monitor usable for a different operation .
44,110
def from_shakemap(cls, shakemap_array):
    """
    Build a site collection from a shakemap array.
    """
    fields = 'sids lon lat depth vs30'.split()
    dt = numpy.dtype([(f, site_param_dt[f]) for f in fields])
    nsites = len(shakemap_array)
    arr = numpy.zeros(nsites, dt)
    arr['sids'] = numpy.arange(nsites, dtype=numpy.uint32)
    arr['depth'] = numpy.zeros(nsites)
    for f in ('lon', 'lat', 'vs30'):
        arr[f] = shakemap_array[f]
    arr.flags.writeable = False  # freeze the underlying array
    new = object.__new__(cls)
    new.complete = new
    new.array = arr
    return new
Build a site collection from a shakemap array
44,111
def from_points(cls, lons, lats, depths=None, sitemodel=None,
                req_site_params=()):
    """
    Build the site collection from arrays of longitudes, latitudes and
    (optionally) depths, plus an optional site model supplying the
    required site parameters.
    """
    nsites = len(lons)
    assert nsites < U32LIMIT, nsites
    if depths is None:
        depths = numpy.zeros(nsites)
    assert nsites == len(lats) == len(depths), (
        nsites, len(lats), len(depths))
    self = object.__new__(cls)
    self.complete = self
    req = ['sids', 'lon', 'lat', 'depth'] + sorted(
        par for par in req_site_params if par not in ('lon', 'lat'))
    if 'vs30' in req and 'vs30measured' not in req:
        req.append('vs30measured')
    self.dtype = numpy.dtype([(p, site_param_dt[p]) for p in req])
    self.array = arr = numpy.zeros(nsites, self.dtype)
    arr['sids'] = numpy.arange(nsites, dtype=numpy.uint32)
    arr['lon'] = fix_lon(numpy.array(lons))
    arr['lat'] = numpy.array(lats)
    arr['depth'] = numpy.array(depths)
    if sitemodel is None:
        pass  # no site parameters to set
    elif hasattr(sitemodel, 'reference_vs30_value'):
        # global site parameters
        self._set('vs30', sitemodel.reference_vs30_value)
        self._set('vs30measured',
                  sitemodel.reference_vs30_type == 'measured')
        self._set('z1pt0', sitemodel.reference_depth_to_1pt0km_per_sec)
        self._set('z2pt5', sitemodel.reference_depth_to_2pt5km_per_sec)
        self._set('siteclass', sitemodel.reference_siteclass)
    else:
        # site model as a composite array
        for name in sitemodel.dtype.names:
            if name not in ('lon', 'lat'):
                self._set(name, sitemodel[name])
    return self
Build the site collection from
44,112
def make_complete(self):
    """
    Turn the site collection into a complete one, if needed, by
    resetting the site IDs to the range 0..N-1.
    """
    self.array['sids'] = numpy.arange(len(self), dtype=numpy.uint32)
    self.complete = self
Turns the site collection into a complete one if needed
44,113
def filter(self, mask):
    """
    Create a SiteCollection with only a subset of sites.

    :param mask: a boolean array with as many elements as sites
    :returns: self, a filtered collection, or None if everything is
        filtered out
    """
    assert len(mask) == len(self), (len(mask), len(self))
    if mask.all():
        return self  # nothing filtered out
    if not mask.any():
        return None  # everything filtered out
    indices, = mask.nonzero()
    return self.filtered(indices)
Create a SiteCollection with only a subset of sites .
44,114
def point_at(self, horizontal_distance, vertical_increment, azimuth):
    """
    Compute the point at the given horizontal distance, vertical
    increment and azimuth from this point.
    """
    lon, lat = geodetic.point_at(self.longitude, self.latitude,
                                 azimuth, horizontal_distance)
    return Point(lon, lat, self.depth + vertical_increment)
Compute the point with given horizontal vertical distances and azimuth from this point .
44,115
def equally_spaced_points(self, point, distance):
    """
    Compute the set of points equally spaced between this point and
    the given point.
    """
    lons, lats, depths = geodetic.intervals_between(
        self.longitude, self.latitude, self.depth,
        point.longitude, point.latitude, point.depth, distance)
    return [Point(lo, la, de) for lo, la, de in zip(lons, lats, depths)]
Compute the set of points equally spaced between this point and the given point .
44,116
def to_polygon(self, radius):
    """
    Create a circular polygon with the specified radius centered in
    the point.
    """
    assert radius > 0
    from openquake.hazardlib.geo.polygon import Polygon
    # project around the point itself, so the buffer radius is in km
    proj = geo_utils.OrthographicProjection(
        self.longitude, self.longitude, self.latitude, self.latitude)
    center = shapely.geometry.Point(*proj(self.longitude, self.latitude))
    return Polygon._from_2d(center.buffer(radius), proj)
Create a circular polygon with specified radius centered in the point .
44,117
def closer_than(self, mesh, radius):
    """
    Return a boolean array marking which mesh points lie within
    ``radius`` km of this point.
    """
    # a mesh without depths is treated as lying at depth zero
    depths = 0 if mesh.depths is None else mesh.depths
    dists = geodetic.distance(self.longitude, self.latitude, self.depth,
                              mesh.lons, mesh.lats, depths)
    return dists <= radius
Check for proximity of points in the mesh .
44,118
def print_csm_info(fname):
    """
    Parse the composite source model without instantiating the sources
    and print information about its composition and the full logic tree.
    """
    oqparam = readinput.get_oqparam(fname)
    csm = readinput.get_composite_source_model(oqparam, in_memory=False)
    print(csm.info)
    print('See http://docs.openquake.org/oq-engine/stable/'
          'effective-realizations.html for an explanation')
    rlzs_assoc = csm.info.get_rlzs_assoc()
    print(rlzs_assoc)
    dupl = [(srcs[0]['id'], len(srcs))
            for srcs in csm.check_dupl_sources()]
    if dupl:
        print(rst_table(dupl, ['source_id', 'multiplicity']))
    tot, pairs = get_pickled_sizes(rlzs_assoc)
    print(rst_table(pairs, ['attribute', 'nbytes']))
Parse the composite source model without instantiating the sources and prints information about its composition and the full logic tree
44,119
def do_build_reports(directory):
    """
    Walk the directory and build pre-calculation reports for all the
    job.ini files found.
    """
    job_names = ('job.ini', 'job_h.ini', 'job_haz.ini', 'job_hazard.ini')
    for cwd, dirs, files in os.walk(directory):
        for f in sorted(files):
            if f not in job_names:
                continue
            job_ini = os.path.join(cwd, f)
            logging.info(job_ini)
            try:
                reportwriter.build_report(job_ini, cwd)
            except Exception as e:
                # keep walking: one broken job must not stop the scan
                logging.error(str(e))
Walk the directory and builds pre - calculation reports for all the job . ini files found .
44,120
def info(calculators, gsims, views, exports, extracts, parameters,
         report, input_file=''):
    """
    Give information. You can pass the name of an available calculator,
    a job.ini file, or a zip archive with the input files.
    """
    if calculators:
        for calc in sorted(base.calculators):
            print(calc)
    if gsims:
        for gs in gsim.get_available_gsims():
            print(gs)
    if views:
        for name in sorted(view):
            print(name)
    if exports:
        dic = groupby(export, operator.itemgetter(0),
                      lambda group: [r[1] for r in group])
        n = 0
        for exporter, formats in dic.items():
            print(exporter, formats)
            n += len(formats)
        print('There are %d exporters defined.' % n)
    if extracts:
        for key in extract:
            func = extract[key]
            if hasattr(func, '__wrapped__'):
                fm = FunctionMaker(func.__wrapped__)
            else:
                fm = FunctionMaker(func)
            print('%s(%s)%s' % (fm.name, fm.signature, fm.doc))
    if parameters:
        params = []
        for val in vars(OqParam).values():
            if hasattr(val, 'name'):
                params.append(val)
        params.sort(key=lambda x: x.name)
        for param in params:
            print(param.name)
    if os.path.isdir(input_file) and report:
        # build reports for every job.ini in the directory tree,
        # silencing the info-level logging while doing so
        with Monitor('info', measuremem=True) as mon:
            with mock.patch.object(logging.root, 'info'):
                do_build_reports(input_file)
        print(mon)
    elif input_file.endswith('.xml'):
        node = nrml.read(input_file)
        if node[0].tag.endswith('sourceModel'):
            if node['xmlns'].endswith('nrml/0.4'):
                raise InvalidFile(
                    '%s is in NRML 0.4 format, please run the following '
                    'command:\noq upgrade_nrml %s' % (
                        input_file, os.path.dirname(input_file) or '.'))
            print(source_model_info([node[0]]))
        elif node[0].tag.endswith('logicTree'):
            nodes = [nrml.read(sm_path)[0] for sm_path in
                     logictree.collect_info(input_file).smpaths]
            print(source_model_info(nodes))
        else:
            print(node.to_str())
    elif input_file.endswith(('.ini', '.zip')):
        with Monitor('info', measuremem=True) as mon:
            if report:
                print('Generated', reportwriter.build_report(input_file))
            else:
                print_csm_info(input_file)
        if mon.duration > 1:
            print(mon)
    elif input_file:
        print("No info for '%s'" % input_file)
Give information . You can pass the name of an available calculator a job . ini file or a zip archive with the input files .
44,121
def classical_split_filter(srcs, srcfilter, gsims, params, monitor):
    """
    Split the given sources, filter the subsources and then compute
    the PoEs. Yield back subtasks if the split sources contain more
    than maxweight ruptures.
    """
    ss = int(os.environ.get('OQ_SAMPLE_SOURCES', 0))
    if ss:
        # sample a subset of the split sources
        splits, stime = split_sources(srcs)
        srcs = readinput.random_filtered_sources(splits, srcfilter, ss)
        yield classical(srcs, srcfilter, gsims, params, monitor)
        return
    sources = []
    with monitor("filtering/splitting sources"):
        for src, _sites in srcfilter(srcs):
            if src.num_ruptures >= params['maxweight']:
                # heavy source: split it and filter the pieces
                splits, stime = split_sources([src])
                sources.extend(srcfilter.filter(splits))
            else:
                sources.append(src)
    blocks = list(block_splitter(sources, params['maxweight'],
                                 operator.attrgetter('num_ruptures')))
    if blocks:
        # all blocks but the last become subtasks; the last one is
        # computed directly
        for block in blocks[:-1]:
            yield classical, block, srcfilter, gsims, params
        yield classical(blocks[-1], srcfilter, gsims, params, monitor)
Split the given sources filter the subsources and the compute the PoEs . Yield back subtasks if the split sources contain more than maxweight ruptures .
44,122
def get_azimuth_plunge(vect, degrees=True):
    """
    For a given vector in USE format, retrieve the azimuth and plunge.
    """
    if vect[0] > 0:
        # flip the vector so that it points downward
        vect = -1. * np.copy(vect)
    vect_hor = sqrt(vect[1] ** 2. + vect[2] ** 2.)
    plunge = atan2(-vect[0], vect_hor)
    azimuth = atan2(vect[2], -vect[1])
    if not degrees:
        return azimuth % (2. * pi), plunge
    to_deg = 180. / pi
    return (to_deg * azimuth) % 360., to_deg * plunge
For a given vector in USE format retrieve the azimuth and plunge
44,123
def use_to_ned(tensor):
    """
    Convert a tensor in USE coordinate system to NED.
    """
    # similarity transform with the USE->NED rotation matrix
    rotated = ROT_NED_USE.T * np.matrix(tensor) * ROT_NED_USE
    return np.array(rotated)
Converts a tensor in USE coordinate sytem to NED
44,124
def ned_to_use(tensor):
    """
    Convert a tensor in NED coordinate system to USE.
    """
    # inverse of the USE->NED similarity transform
    rotated = ROT_NED_USE * np.matrix(tensor) * ROT_NED_USE.T
    return np.array(rotated)
Converts a tensor in NED coordinate sytem to USE
44,125
def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types):
    """
    Apply a frequency-dependent correction to the mean ground motion
    computed by the parent GMPE. Standard deviation is left unchanged.
    """
    mean, stddevs = super().get_mean_and_stddevs(
        sites, rup, dists, imt, stddev_types)
    # representative frequency of the intensity measure type
    if imt == PGA():
        freq = 50.0
    elif imt == PGV():
        freq = 2.0
    else:
        freq = 1. / imt.period
    x1 = np.min([-0.18 + 0.17 * np.log10(freq), 0])
    # depth-dependent coefficient
    if rup.hypo_depth < 20.0:
        x0 = np.max([0.217 - 0.321 * np.log10(freq), 0])
    elif rup.hypo_depth > 35.0:
        x0 = np.min([0.263 + 0.0924 * np.log10(freq), 0.35])
    else:
        x0 = 0.2
    # clip distances below 1 km
    rjb = [d if d > 1 else 1 for d in dists.rjb]
    mean += (x0 + x1 * np.log10(rjb)) / np.log10(np.e)
    return mean, stddevs
Using a frequency dependent correction for the mean ground motion . Standard deviation is fixed .
44,126
def get_rlz(self, rlzstr):
    r"""
    Get a Realization instance for a string of the form 'rlz-\d+'.

    :param rlzstr: a string like ``rlz-0``
    :returns: the corresponding realization, or None if the string
        does not match
    """
    mo = re.match(r'rlz-(\d+)', rlzstr)
    if not mo:
        return
    return self.realizations[int(mo.group(1))]
Get a Realization instance for a string of the form rlz-\d+
44,127
def export(datastore_key, calc_id=-1, exports='csv', export_dir='.'):
    """
    Export an output from the datastore.
    """
    dstore = util.read(calc_id)
    parent_id = dstore['oqparam'].hazard_calculation_id
    if parent_id:
        dstore.parent = util.read(parent_id)
    dstore.export_dir = export_dir
    with performance.Monitor('export', measuremem=True) as mon:
        for fmt in exports.split(','):
            fnames = export_((datastore_key, fmt), dstore)
            nbytes = sum(os.path.getsize(f) for f in fnames)
            print('Exported %s in %s' % (general.humansize(nbytes), fnames))
    if mon.duration > 1:
        print(mon)
    dstore.close()
Export an output from the datastore .
44,128
def convert_UCERFSource(self, node):
    """
    Convert the Ucerf Source node into an SES Control object.
    """
    dirname = os.path.dirname(self.fname)
    source_file = os.path.join(dirname, node["filename"])
    if "startDate" in node.attrib and "investigationTime" in node.attrib:
        inv_time = float(node["investigationTime"])
        if inv_time != self.investigation_time:
            raise ValueError("Source investigation time (%s) is not "
                             "equal to configuration investigation time "
                             "(%s)" % (inv_time, self.investigation_time))
        start_date = datetime.strptime(node["startDate"], "%d/%m/%Y")
    else:
        start_date = None
    return UCERFSource(
        source_file,
        self.investigation_time,
        start_date,
        float(node["minMag"]),
        npd=self.convert_npdist(node),
        hdd=self.convert_hpdist(node),
        aspect=~node.ruptAspectRatio,
        upper_seismogenic_depth=~node.pointGeometry.upperSeismoDepth,
        lower_seismogenic_depth=~node.pointGeometry.lowerSeismoDepth,
        msr=valid.SCALEREL[~node.magScaleRel](),
        mesh_spacing=self.rupture_mesh_spacing,
        trt=node["tectonicRegion"])
Converts the Ucerf Source node into an SES Control object
44,129
def build_idx_set(branch_id, start_date):
    """
    Build a dictionary of keys based on the branch code.
    """
    codes = branch_id.split("/")
    codes.insert(3, "Rates")
    idx_set = {
        "sec": "/".join([codes[0], codes[1], "Sections"]),
        "mag": "/".join([codes[0], codes[1], codes[2], "Magnitude"])}
    idx_set["rate"] = "/".join(codes)
    idx_set["rake"] = "/".join([codes[0], codes[1], "Rake"])
    idx_set["msr"] = "-".join(codes[:3])
    idx_set["geol"] = codes[0]
    flat = branch_id.replace("/", "_")
    if start_date:
        # drop the trailing token of the flattened branch id
        idx_set["grid_key"] = "_".join(flat.split("_")[:-1])
    else:
        idx_set["grid_key"] = flat
    idx_set["total_key"] = branch_id.replace("/", "|")
    return idx_set
Builds a dictionary of keys based on the branch code
44,130
def get_rupture_surface(mag, nodal_plane, hypocenter, msr,
                        rupture_aspect_ratio, upper_seismogenic_depth,
                        lower_seismogenic_depth, mesh_spacing=1.0):
    """
    Create and return a planar rupture surface with the given
    properties, centered on the hypocenter when possible and shifted
    to stay within the seismogenic layer otherwise.
    """
    assert (upper_seismogenic_depth <= hypocenter.depth
            and lower_seismogenic_depth >= hypocenter.depth)
    rdip = math.radians(nodal_plane.dip)

    # precalculated azimuths of the four in-plane directions,
    # measured clockwise from the strike
    azimuth_right = nodal_plane.strike
    azimuth_down = (azimuth_right + 90) % 360
    azimuth_left = (azimuth_down + 90) % 360
    azimuth_up = (azimuth_left + 90) % 360

    rup_length, rup_width = get_rupture_dimensions(
        mag, nodal_plane, msr, rupture_aspect_ratio,
        upper_seismogenic_depth, lower_seismogenic_depth)
    # projections of the down-dip width on the vertical and
    # horizontal planes
    rup_proj_height = rup_width * math.sin(rdip)
    rup_proj_width = rup_width * math.cos(rdip)

    # shift the rupture center if the plane would stick out of the
    # seismogenic layer
    hheight = rup_proj_height / 2
    vshift = upper_seismogenic_depth - hypocenter.depth + hheight
    if vshift < 0:
        vshift = lower_seismogenic_depth - hypocenter.depth - hheight
        if vshift > 0:
            vshift = 0
    rupture_center = hypocenter
    if vshift != 0:
        hshift = abs(vshift / math.tan(rdip))
        rupture_center = rupture_center.point_at(
            horizontal_distance=hshift, vertical_increment=vshift,
            azimuth=(azimuth_up if vshift < 0 else azimuth_down))

    # move diagonally from the center to the four corners
    theta = math.degrees(
        math.atan((rup_proj_width / 2.) / (rup_length / 2.)))
    hor_dist = math.sqrt(
        (rup_length / 2.) ** 2 + (rup_proj_width / 2.) ** 2)
    left_top = rupture_center.point_at(
        horizontal_distance=hor_dist,
        vertical_increment=-rup_proj_height / 2,
        azimuth=(nodal_plane.strike + 180 + theta) % 360)
    right_top = rupture_center.point_at(
        horizontal_distance=hor_dist,
        vertical_increment=-rup_proj_height / 2,
        azimuth=(nodal_plane.strike - theta) % 360)
    left_bottom = rupture_center.point_at(
        horizontal_distance=hor_dist,
        vertical_increment=rup_proj_height / 2,
        azimuth=(nodal_plane.strike + 180 - theta) % 360)
    right_bottom = rupture_center.point_at(
        horizontal_distance=hor_dist,
        vertical_increment=rup_proj_height / 2,
        azimuth=(nodal_plane.strike + theta) % 360)
    return PlanarSurface(nodal_plane.strike, nodal_plane.dip,
                         left_top, right_top, right_bottom, left_bottom)
Create and return rupture surface object with given properties .
44,131
def get_ridx(self, iloc):
    """
    List of rupture indices for the given iloc.
    """
    with h5py.File(self.source_file, "r") as hdf5:
        key = self.idx_set["geol"] + "/RuptureIndex"
        return hdf5[key][iloc]
List of rupture indices for the given iloc
44,132
def get_background_sids(self, src_filter):
    """
    We can apply the filtering of the background sites as a
    pre-processing step - this is done here rather than in the
    sampling of the ruptures themselves.
    """
    branch_key = self.idx_set["grid_key"]
    idist = src_filter.integration_distance(DEFAULT_TRT)
    with h5py.File(self.source_file, 'r') as hdf5:
        bg_locations = hdf5["Grid/Locations"].value
        distances = min_geodetic_distance(
            src_filter.sitecol.xyz,
            (bg_locations[:, 0], bg_locations[:, 1]))
        # maximum rupture half-length at each background site
        mmax_areas = self.msr.get_median_area(
            hdf5["/".join(["Grid", branch_key, "MMax"])].value, 0.0)
        mmax_lengths = numpy.sqrt(mmax_areas / self.aspect)
        ok = distances <= (0.5 * mmax_lengths + idist)
        return numpy.where(ok)[0].tolist()
We can apply the filtering of the background sites as a pre - processing step - this is done here rather than in the sampling of the ruptures themselves
44,133
def iter_ruptures(self):
    """
    Yield ruptures for the current set of indices.
    """
    assert self.orig, '%s is not fully initialized' % self
    for ridx in range(self.start, self.stop):
        if not self.orig.rate[ridx]:
            continue  # skip ruptures with zero rate
        rup = self.get_ucerf_rupture(ridx, self.src_filter)
        if rup:
            yield rup
Yield ruptures for the current set of indices
44,134
def get_background_sources(self, src_filter, sample_factor=None):
    """
    Turn the background model of a given branch into a set of point
    sources.
    """
    background_sids = self.get_background_sids(src_filter)
    if sample_factor is not None:
        # keep only a random subset of the background sites
        background_sids = random_filter(
            background_sids, sample_factor, seed=42)
    with h5py.File(self.source_file, "r") as hdf5:
        grid_loc = "/".join(["Grid", self.idx_set["grid_key"]])
        mags = hdf5[grid_loc + "/Magnitude"].value
        mmax = hdf5[grid_loc + "/MMax"][background_sids]
        rates = hdf5[grid_loc + "/RateArray"][background_sids, :]
        locations = hdf5["Grid/Locations"][background_sids, :]
        sources = []
        for i, bg_idx in enumerate(background_sids):
            src_id = "_".join([self.idx_set["grid_key"], str(bg_idx)])
            src_name = "|".join([self.idx_set["total_key"], str(bg_idx)])
            # restrict to the magnitude range valid for this site
            mag_idx = (self.min_mag <= mags) & (mags < mmax[i])
            src_mags = mags[mag_idx]
            src_mfd = EvenlyDiscretizedMFD(
                src_mags[0], src_mags[1] - src_mags[0],
                rates[i, mag_idx].tolist())
            ps = PointSource(
                src_id, src_name, self.tectonic_region_type, src_mfd,
                self.mesh_spacing, self.msr, self.aspect, self.tom,
                self.usd, self.lsd,
                Point(locations[i, 0], locations[i, 1]),
                self.npd, self.hdd)
            ps.id = self.id
            ps.src_group_id = self.src_group_id
            ps.num_ruptures = ps.count_ruptures()
            sources.append(ps)
    return sources
Turn the background model of a given branch into a set of point sources
44,135
def split(src, chunksize=MINWEIGHT):
    """
    Split a complex fault source in chunks.
    """
    blocks = block_splitter(src.iter_ruptures(), chunksize,
                            key=operator.attrgetter('mag'))
    for i, block in enumerate(blocks):
        rup = block[0]
        source_id = '%s:%d' % (src.source_id, i)
        amfd = mfd.ArbitraryMFD([rup.mag], [rup.mag_occ_rate])
        yield RuptureCollectionSource(
            source_id, src.name, src.tectonic_region_type, amfd, block)
Split a complex fault source in chunks
44,136
def get_bounding_box(self, maxdist):
    """
    Bounding box containing all the hypocenters, enlarged by the
    maximum distance.
    """
    # delegate to the module-level get_bounding_box utility
    locations = [rup.hypocenter for rup in self.ruptures]
    return get_bounding_box(locations, maxdist)
Bounding box containing all the hypocenters enlarged by the maximum distance
44,137
def show_attrs(key, calc_id=-1):
    """
    Show the attributes of a HDF5 dataset in the datastore.
    """
    ds = util.read(calc_id)
    try:
        attrs = h5py.File.__getitem__(ds.hdf5, key).attrs
    except KeyError:
        print('%r is not in %s' % (key, ds))
    else:
        if len(attrs) == 0:
            print('%s has no attributes' % key)
        for name, value in attrs.items():
            print(name, value)
    finally:
        ds.close()
Show the attributes of a HDF5 dataset in the datastore .
44,138
def compare_mean_curves(calc_ref, calc, nsigma=3):
    """
    Compare the hazard curves coming from two different calculations.
    """
    dstore_ref = datastore.read(calc_ref)
    dstore = datastore.read(calc)
    imtls = dstore_ref['oqparam'].imtls
    if dstore['oqparam'].imtls != imtls:
        raise RuntimeError('The IMTs and levels are different between '
                           'calculation %d and %d' % (calc_ref, calc))
    sitecol_ref = dstore_ref['sitecol']
    sitecol = dstore['sitecol']
    # map (lon, lat) -> site id for both calculations
    site_id_ref = {(lon, lat): sid for sid, lon, lat in zip(
        sitecol_ref.sids, sitecol_ref.lons, sitecol_ref.lats)}
    site_id = {(lon, lat): sid for sid, lon, lat in zip(
        sitecol.sids, sitecol.lons, sitecol.lats)}
    common = set(site_id_ref) & set(site_id)
    if not common:
        raise RuntimeError('There are no common sites between calculation '
                           '%d and %d' % (calc_ref, calc))
    pmap_ref = PmapGetter(dstore_ref, sids=[
        site_id_ref[lonlat] for lonlat in common]).get_mean()
    pmap = PmapGetter(dstore, sids=[
        site_id[lonlat] for lonlat in common]).get_mean()
    for lonlat in common:
        mean, std = pmap[site_id[lonlat]].array.T
        mean_ref, std_ref = pmap_ref[site_id_ref[lonlat]].array.T
        err = numpy.sqrt(std ** 2 + std_ref ** 2)
        for imt in imtls:
            sl = imtls(imt)
            ok = (numpy.abs(mean[sl] - mean_ref[sl])
                  < nsigma * err[sl]).all()
            if ok:
                continue
            # plot the two curves with their error bands
            md = (numpy.abs(mean[sl] - mean_ref[sl])).max()
            plt.title('point=%s, imt=%s, maxdiff=%.2e'
                      % (lonlat, imt, md))
            plt.loglog(imtls[imt], mean_ref[sl] + std_ref[sl],
                       label=str(calc_ref), color='black')
            plt.loglog(imtls[imt], mean_ref[sl] - std_ref[sl],
                       color='black')
            plt.loglog(imtls[imt], mean[sl] + std[sl],
                       label=str(calc), color='red')
            plt.loglog(imtls[imt], mean[sl] - std[sl], color='red')
            plt.legend()
            plt.show()
Compare the hazard curves coming from two different calculations .
44,139
def _get_stddevs(self, sites, rup, C, stddev_types, ln_y_ref, exp1, exp2):
    """
    Return the standard deviation, which is fixed at 0.65 for every
    site.
    """
    ret = []
    for stddev_type in stddev_types:
        assert stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES
        if stddev_type == const.StdDev.TOTAL:
            ret.append(0.65 * np.ones_like(sites.vs30))
    return ret
Returns the standard deviation, which is fixed at 0.65 for every site
44,140
def build_imls(ff, continuous_fragility_discretization,
               steps_per_interval=0):
    """
    Build intensity measure levels from a fragility function. If the
    function is continuous, they are produced simply as a linear space
    between minIML and maxIML. If the function is discrete, they are
    generated with a complex logic depending on the noDamageLimit and
    the parameter steps per interval.
    """
    if ff.format == 'discrete':
        imls = ff.imls
        if ff.nodamage and ff.nodamage < imls[0]:
            # prepend the no-damage limit
            imls = [ff.nodamage] + imls
        if steps_per_interval > 1:
            return fine_graining(imls, steps_per_interval)
        return imls
    return numpy.linspace(ff.minIML, ff.maxIML,
                          continuous_fragility_discretization)
Build intensity measure levels from a fragility function . If the function is continuous they are produced simply as a linear space between minIML and maxIML . If the function is discrete they are generated with a complex logic depending on the noDamageLimit and the parameter steps per interval .
44,141
def insured_loss_curve(curve, deductible, insured_limit):
    """
    Compute an insured loss ratio curve given a loss ratio curve.

    :param curve: a 2xN array with losses in the first row and poes in
        the second
    :param deductible: deductible of the insurance policy
    :param insured_limit: upper limit of the insurance policy
    """
    # keep only the part of the curve below the insured limit
    losses, poes = curve[:, curve[0] <= insured_limit]
    # poe at the deductible, by interpolation on the full curve
    limit_poe = interpolate.interp1d(
        *curve, bounds_error=False, fill_value=1)(deductible)
    # cap the poes at the poe of the deductible
    capped_poes = numpy.piecewise(
        poes, [poes > limit_poe], [limit_poe, lambda x: x])
    return numpy.array([losses, capped_poes])
Compute an insured loss ratio curve given a loss ratio curve
44,142
def bcr(eal_original, eal_retrofitted, interest_rate,
        asset_life_expectancy, asset_value, retrofitting_cost):
    """
    Compute the Benefit-Cost Ratio.
    """
    # expected annual saving, in absolute terms
    saving = (eal_original - eal_retrofitted) * asset_value
    # discount factor over the asset life
    discount = 1 - numpy.exp(-interest_rate * asset_life_expectancy)
    return saving * discount / (interest_rate * retrofitting_cost)
Compute the Benefit - Cost Ratio .
44,143
def pairwise_mean(values):
    "Averages between a value and the next value in a sequence"
    means = [numpy.mean(couple) for couple in pairwise(values)]
    return numpy.array(means)
Averages between a value and the next value in a sequence
44,144
def pairwise_diff(values):
    "Differences between a value and the next value in a sequence"
    diffs = [first - second for first, second in pairwise(values)]
    return numpy.array(diffs)
Differences between a value and the next value in a sequence
44,145
def mean_std(fractions):
    """
    Given an N x M matrix, return mean and std computed on the rows,
    i.e. two M-dimensional vectors.
    """
    nrows = fractions.shape[0]
    if nrows == 1:
        # a single row has no meaningful sample std: return NaNs
        return fractions[0], numpy.ones_like(fractions[0]) * numpy.nan
    return (numpy.mean(fractions, axis=0),
            numpy.std(fractions, axis=0, ddof=1))
Given an N x M matrix returns mean and std computed on the rows i . e . two M - dimensional vectors .
44,146
def broadcast(func, composite_array, *args):
    """
    Broadcast an array function over a composite array, field by field.
    """
    results = {}
    dtlist = []
    for name in composite_array.dtype.names:
        results[name] = func(composite_array[name], *args)
        dtlist.append((name, results[name].dtype))
    out = numpy.zeros(results[name].shape, numpy.dtype(dtlist))
    for name in results:
        out[name] = results[name]
    return out
Broadcast an array function over a composite array
44,147
def average_loss(lc):
    """
    Given a loss curve array with poe and loss fields, compute the
    average loss on a period of time.
    """
    if lc.dtype.names:
        losses, poes = lc['loss'], lc['poe']
    else:
        losses, poes = lc
    return -pairwise_diff(losses) @ pairwise_mean(poes)
Given a loss curve array with poe and loss fields computes the average loss on a period of time .
44,148
def normalize_curves_eb(curves):
    """
    A more sophisticated version of normalize_curves, used in the
    event based calculator.

    :param curves: a list of (losses, poes) pairs
    :returns: the reference losses and a matrix of poes
    """
    non_zero_curves = [(losses, poes) for losses, poes in curves
                       if losses[-1] > 0]
    if not non_zero_curves:
        # every curve is zero: return the first losses and all poes
        return curves[0][0], numpy.array(
            [poes for _losses, poes in curves])
    # use the curve with the highest maximum loss as reference
    max_losses = [losses[-1] for losses, _poes in non_zero_curves]
    reference_curve = non_zero_curves[numpy.argmax(max_losses)]
    loss_ratios = reference_curve[0]
    curves_poes = [
        interpolate.interp1d(losses, poes, bounds_error=False,
                             fill_value=0)(loss_ratios)
        for losses, poes in curves]
    # fix degenerated case producing a NaN in the first point
    for cp in curves_poes:
        if numpy.isnan(cp[0]):
            cp[0] = 0
    return loss_ratios, numpy.array(curves_poes)
A more sophisticated version of normalize_curves used in the event based calculator .
44,149
def sample(self, means, covs, idxs, epsilons=None):
    """
    Sample the epsilons and apply the corrections to the means.
    This method is called only if there are nonzero covs.
    """
    if epsilons is None:
        return means
    self.set_distribution(epsilons)
    return self.distribution.sample(means, covs, means * covs, idxs)
Sample the epsilons and apply the corrections to the means . This method is called only if there are nonzero covs .
44,150
def mean_loss_ratios_with_steps(self, steps):
    """
    Split the mean loss ratios, producing a new set of loss ratios.
    The new set of loss ratios always includes 0.0 and 1.0.
    """
    ratios = self.mean_loss_ratios
    if min(ratios) > 0.0:
        # prepend a zero
        ratios = numpy.concatenate([[0.0], ratios])
    if max(ratios) < 1.0:
        # append a one
        ratios = numpy.concatenate([ratios, [1.0]])
    return fine_graining(ratios, steps)
Split the mean loss ratios producing a new set of loss ratios . The new set of loss ratios always includes 0 . 0 and 1 . 0
44,151
def sample(self, probs, _covs, idxs, epsilons):
    """
    Sample the .loss_ratios with the given probabilities.
    """
    self.set_distribution(epsilons)
    return self.distribution.sample(self.loss_ratios, probs)
Sample the . loss_ratios with the given probabilities .
44,152
def build(self, continuous_fragility_discretization, steps_per_interval):
    """
    Return a new FragilityModel instance, in which the values have
    been replaced with FragilityFunctionList instances.
    """
    new = copy.copy(self)
    for key, ffl in self.items():
        new[key] = ffl.build(self.limitStates,
                             continuous_fragility_discretization,
                             steps_per_interval)
    return new
Return a new FragilityModel instance in which the values have been replaced with FragilityFunctionList instances .
44,153
def compute_gmfs(rupgetter, srcfilter, param, monitor):
    """
    Compute GMFs and optionally hazard curves.
    """
    getter = GmfGetter(rupgetter, srcfilter, param['oqparam'])
    with monitor('getting ruptures'):
        getter.init()
    return getter.compute_gmfs_curves(monitor)
Compute GMFs and optionally hazard curves
44,154
def _get_minmax_edges(self, edge):
    """
    Updates the upper and lower depths based on the input edges.

    ``edge`` may be a Line (depths read from its points) or a numpy
    array with depths in column 2.
    """
    if isinstance(edge, Line):
        depth_vals = np.array([node.depth for node in edge.points])
    else:
        depth_vals = edge[:, 2]
    temp_upper_depth = np.min(depth_vals)
    # NOTE(review): a stored depth of 0.0 is falsy and would be
    # re-initialised here -- confirm 0.0 is not a legal stored value
    if not self.upper_depth:
        self.upper_depth = temp_upper_depth
    else:
        if temp_upper_depth < self.upper_depth:
            self.upper_depth = temp_upper_depth
    temp_lower_depth = np.max(depth_vals)
    if not self.lower_depth:
        self.lower_depth = temp_lower_depth
    else:
        if temp_lower_depth > self.lower_depth:
            self.lower_depth = temp_lower_depth
Updates the upper and lower depths based on the input edges
44,155
def _get_magnitude_term ( self , C , mag ) : if mag >= self . CONSTS [ "Mh" ] : return C [ "e1" ] + C [ "b3" ] * ( mag - self . CONSTS [ "Mh" ] ) else : return C [ "e1" ] + ( C [ "b1" ] * ( mag - self . CONSTS [ "Mh" ] ) ) + ( C [ "b2" ] * ( mag - self . CONSTS [ "Mh" ] ) ** 2. )
Returns the magnitude scaling term - equation 3
44,156
def _get_distance_term ( self , C , rjb , mag ) : c_3 = self . _get_anelastic_coeff ( C ) rval = np . sqrt ( rjb ** 2. + C [ "h" ] ** 2. ) return ( C [ "c1" ] + C [ "c2" ] * ( mag - self . CONSTS [ "Mref" ] ) ) * np . log ( rval / self . CONSTS [ "Rref" ] ) + c_3 * ( rval - self . CONSTS [ "Rref" ] )
Returns the general distance scaling term - equation 2
44,157
def _get_site_term ( self , C , vs30 ) : dg1 , dg2 = self . _get_regional_site_term ( C ) return ( C [ "g1" ] + dg1 ) + ( C [ "g2" ] + dg2 ) * np . log ( vs30 )
Returns only a linear site amplification term
44,158
def _get_stddevs ( self , C , stddev_types , num_sites ) : assert all ( stddev_type in self . DEFINED_FOR_STANDARD_DEVIATION_TYPES for stddev_type in stddev_types ) stddevs = [ np . zeros ( num_sites ) + C [ 'SigmaTot' ] for _ in stddev_types ] return stddevs
Return standard deviations as defined in tables below
44,159
def _compute_mean(self, C, mag, rrup, hypo_depth, delta_R, delta_S,
                  delta_V, delta_I, vs30):
    """
    Compute MMI Intensity Value as per Equation in Table 5 and
    Table 7 pag 198.
    """
    # delta_R, delta_S, delta_V, delta_I act as indicator terms
    # selecting the applicable coefficient adjustments
    mean = (C['A1'] +
            (C['A2'] + C['A2R'] * delta_R + C['A2V'] * delta_V) * mag +
            (C['A3'] + C['A3S'] * delta_S + C['A3V'] * delta_V) *
            np.log10(np.power((rrup ** 3 + C['d'] ** 3), 1.0 / 3.0)) +
            C['A4'] * hypo_depth + C['A5'] * delta_I)
    # the site class correction depends on the intensity just computed
    S = self._get_site_class(vs30, mean)
    mean = mean + S
    return mean
Compute MMI Intensity Value as per Equation in Table 5 and Table 7 pag 198 .
44,160
def _get_stddevs(self, C, stddev_types, num_sites):
    """
    Return total standard deviation as described in paragraph 5.2
    pag 200.
    """
    # between-event (tau) and within-event (sigma) terms, per site
    sigma_inter = C['tau'] + np.zeros(num_sites)
    sigma_intra = C['sigma'] + np.zeros(num_sites)
    std = []
    for stddev_type in stddev_types:
        if stddev_type == const.StdDev.TOTAL:
            # total is the quadrature sum of the two components
            std += [np.sqrt(sigma_intra ** 2 + sigma_inter ** 2)]
        elif stddev_type == const.StdDev.INTRA_EVENT:
            std.append(sigma_intra)
        elif stddev_type == const.StdDev.INTER_EVENT:
            std.append(sigma_inter)
    return std
Return total standard deviation as described in paragraph 5 . 2 pag 200 .
44,161
def plot_assets(calc_id=-1, site_model=False):
    """
    Plot the sites and the assets.

    :param calc_id: calculation ID (-1 presumably means the latest one;
        confirm in util.read)
    :param site_model: if True, also plot the site model points
    """
    # lazy imports to keep the command-line startup fast
    import matplotlib.pyplot as p
    from openquake.hmtk.plotting.patch import PolygonPatch
    dstore = util.read(calc_id)
    try:
        region = dstore['oqparam'].region
    except KeyError:
        region = None
    sitecol = dstore['sitecol']
    try:
        assetcol = dstore['assetcol'].value
    except AttributeError:
        assetcol = dstore['assetcol'].array
    fig = p.figure()
    ax = fig.add_subplot(111)
    if region:
        # draw the region of interest as a translucent polygon
        pp = PolygonPatch(shapely.wkt.loads(region), alpha=0.1)
        ax.add_patch(pp)
    ax.grid(True)
    if site_model and 'site_model' in dstore:
        sm = dstore['site_model']
        sm_lons, sm_lats = sm['lon'], sm['lat']
        # normalize longitudes crossing the international date line
        if len(sm_lons) > 1 and cross_idl(*sm_lons):
            sm_lons %= 360
        p.scatter(sm_lons, sm_lats, marker='.', color='orange')
    p.scatter(sitecol.complete.lons, sitecol.complete.lats,
              marker='.', color='gray')
    p.scatter(assetcol['lon'], assetcol['lat'], marker='.', color='green')
    p.scatter(sitecol.lons, sitecol.lats, marker='+', color='black')
    if 'discarded' in dstore:
        disc = numpy.unique(dstore['discarded'].value[['lon', 'lat']])
        p.scatter(disc['lon'], disc['lat'], marker='x', color='red')
    p.show()
Plot the sites and the assets
44,162
def _get_adjustment ( mag , year , mmin , completeness_year , t_f , mag_inc = 0.1 ) : if len ( completeness_year ) == 1 : if ( mag >= mmin ) and ( year >= completeness_year [ 0 ] ) : return 1.0 else : return False kval = int ( ( ( mag - mmin ) / mag_inc ) ) + 1 if ( kval >= 1 ) and ( year >= completeness_year [ kval - 1 ] ) : return t_f else : return False
If the magnitude is greater than the minimum in the completeness table and the year is greater than the corresponding completeness year then return the Weichert factor
44,163
def get_catalogue_bounding_polygon(catalogue):
    """
    Returns a polygon containing the bounding box of the catalogue.
    """
    upper_lon = np.max(catalogue.data['longitude'])
    upper_lat = np.max(catalogue.data['latitude'])
    lower_lon = np.min(catalogue.data['longitude'])
    lower_lat = np.min(catalogue.data['latitude'])
    # corners ordered clockwise starting from the north-west corner
    return Polygon([Point(lower_lon, upper_lat),
                    Point(upper_lon, upper_lat),
                    Point(upper_lon, lower_lat),
                    Point(lower_lon, lower_lat)])
Returns a polygon containing the bounding box of the catalogue
44,164
def make_from_catalogue(cls, catalogue, spacing, dilate):
    """
    Defines the grid on the basis of the catalogue.

    :param catalogue: earthquake catalogue with lon/lat/depth data
    :param spacing: horizontal grid spacing (x and y)
    :param dilate: amount by which to dilate the bounding box
    """
    new = cls()
    cat_bbox = get_catalogue_bounding_polygon(catalogue)
    if dilate > 0:
        cat_bbox = cat_bbox.dilate(dilate)
    # x/y limits from the (possibly dilated) bounding box,
    # z limits from the catalogue depth range (a single z layer)
    new.update({'xmin': np.min(cat_bbox.lons),
                'xmax': np.max(cat_bbox.lons),
                'xspc': spacing,
                'ymin': np.min(cat_bbox.lats),
                'ymax': np.max(cat_bbox.lats),
                'yspc': spacing,
                'zmin': 0.,
                'zmax': np.max(catalogue.data['depth']),
                'zspc': np.max(catalogue.data['depth'])})
    if new['zmin'] == new['zmax'] == new['zspc'] == 0:
        # all depths are zero: use a single 1 km thick layer
        new['zmax'] = new['zspc'] = 1
    return new
Defines the grid on the basis of the catalogue
44,165
def write_to_csv(self, filename):
    """
    Exports the smoothed seismicity data to a simple csv file.

    Cells with a zero smoothed rate are skipped.

    :param str filename: path of the output file
    """
    header_info = ['Longitude', 'Latitude', 'Depth', 'Observed Count',
                   'Smoothed Rate', 'b-value']
    # context manager guarantees the file is closed even if a row
    # fails to serialize (the original leaked the handle on error)
    with open(filename, 'wt') as fid:
        writer = csv.DictWriter(fid, fieldnames=header_info)
        # write the header row
        writer.writerow(dict((name0, name0) for name0 in header_info))
        for row in self.data:
            if row[4] == 0:
                # no smoothed rate in this cell -- skip it
                continue
            writer.writerow({'Longitude': '%g' % row[0],
                             'Latitude': '%g' % row[1],
                             'Depth': '%g' % row[2],
                             'Observed Count': '%d' % row[3],
                             'Smoothed Rate': '%.6g' % row[4],
                             'b-value': '%g' % self.bval})
Exports to simple csv
44,166
def _validate_hazard_metadata ( md ) : if ( md . get ( 'statistics' ) is not None and ( md . get ( 'smlt_path' ) is not None or md . get ( 'gsimlt_path' ) is not None ) ) : raise ValueError ( 'Cannot specify both `statistics` and logic tree ' 'paths' ) if md . get ( 'statistics' ) is not None : if md . get ( 'statistics' ) not in ( 'mean' , 'max' , 'quantile' , 'std' ) : raise ValueError ( '`statistics` must be either `mean`, `max`, or ' '`quantile`' ) else : if md . get ( 'smlt_path' ) is None or md . get ( 'gsimlt_path' ) is None : raise ValueError ( 'Both logic tree paths are required for ' 'non-statistical results' ) if md . get ( 'statistics' ) == 'quantile' : if md . get ( 'quantile_value' ) is None : raise ValueError ( 'quantile stastics results require a quantile' ' value to be specified' ) if not md . get ( 'statistics' ) == 'quantile' : if md . get ( 'quantile_value' ) is not None : raise ValueError ( 'Quantile value must be specified with ' 'quantile statistics' ) if md . get ( 'imt' ) == 'SA' : if md . get ( 'sa_period' ) is None : raise ValueError ( '`sa_period` is required for IMT == `SA`' ) if md . get ( 'sa_damping' ) is None : raise ValueError ( '`sa_damping` is required for IMT == `SA`' )
Validate a metadata dict of attributes which are more or less the same for hazard curves, hazard maps and disaggregation histograms.
44,167
def _set_metadata ( element , metadata , attr_map , transform = str ) : for kw , attr in attr_map . items ( ) : value = metadata . get ( kw ) if value is not None : element . set ( attr , transform ( value ) )
Set metadata attributes on a given element .
44,168
def serialize(self, data):
    """
    Write a sequence of hazard curves to the specified file.
    """
    with open(self.dest, 'wb') as fh:
        root = et.Element('nrml')
        self.add_hazard_curves(root, self.metadata, data)
        nrml.write(list(root), fh)
Write a sequence of hazard curves to the specified file .
44,169
def add_hazard_curves(self, root, metadata, data):
    """
    Add hazard curves stored into `data` as child of the `root`
    element with `metadata`. See the documentation of the method
    `serialize` and the constructor for a description of `data`
    and `metadata`, respectively.
    """
    hazard_curves = et.SubElement(root, 'hazardCurves')
    _set_metadata(hazard_curves, metadata, _ATTR_MAP)
    imls_elem = et.SubElement(hazard_curves, 'IMLs')
    imls_elem.text = ' '.join(map(scientificformat, metadata['imls']))
    gml_ns = nrml.SERIALIZE_NS_MAP['gml']
    for hc in data:
        # one hazardCurve element per site, with a gml:Point location
        hc_elem = et.SubElement(hazard_curves, 'hazardCurve')
        gml_point = et.SubElement(hc_elem, '{%s}Point' % gml_ns)
        gml_pos = et.SubElement(gml_point, '{%s}pos' % gml_ns)
        gml_pos.text = '%s %s' % (hc.location.x, hc.location.y)
        poes_elem = et.SubElement(hc_elem, 'poEs')
        poes_elem.text = ' '.join(map(scientificformat, hc.poes))
Add hazard curves stored into data as child of the root element with metadata . See the documentation of the method serialize and the constructor for a description of data and metadata respectively .
44,170
def serialize(self, data, fmt='%10.7E'):
    """
    Serialize a collection of ground motion fields to XML.

    :param data: iterable of gmf_set objects
    :param fmt: format string used for the numeric values
    """
    gmf_set_nodes = []
    for gmf_set in data:
        gmf_set_node = Node('gmfSet')
        if gmf_set.investigation_time:
            gmf_set_node['investigationTime'] = str(
                gmf_set.investigation_time)
        gmf_set_node['stochasticEventSetId'] = str(
            gmf_set.stochastic_event_set_id)
        gmf_set_node.nodes = gen_gmfs(gmf_set)
        gmf_set_nodes.append(gmf_set_node)
    gmf_container = Node('gmfCollection')
    # record the logic tree paths this collection belongs to
    gmf_container[SM_TREE_PATH] = self.sm_lt_path
    gmf_container[GSIM_TREE_PATH] = self.gsim_lt_path
    gmf_container.nodes = gmf_set_nodes
    with open(self.dest, 'wb') as dest:
        nrml.write([gmf_container], dest, fmt)
Serialize a collection of ground motion fields to XML .
44,171
def serialize(self, data, investigation_time):
    """
    Serialize a collection of stochastic event sets to XML.

    :param data: dict grp_id -> list of ruptures
    :param investigation_time: investigation time in years
    """
    with open(self.dest, 'wb') as fh:
        root = et.Element('nrml')
        ses_container = et.SubElement(root, 'ruptureCollection')
        ses_container.set('investigationTime', str(investigation_time))
        for grp_id in sorted(data):
            # one ruptureGroup per source group
            attrs = dict(
                id=grp_id,
                tectonicRegion=data[grp_id][0].tectonic_region_type)
            sg = et.SubElement(ses_container, 'ruptureGroup', attrs)
            for rupture in data[grp_id]:
                rupture_to_element(rupture, sg)
        nrml.write(list(root), fh)
Serialize a collection of stochastic event sets to XML .
44,172
def serialize(self, data):
    """
    Serialize hazard map data to XML.

    :param data: iterable of (lon, lat, iml) triples
    """
    with open(self.dest, 'wb') as fh:
        root = et.Element('nrml')
        hazard_map = et.SubElement(root, 'hazardMap')
        _set_metadata(hazard_map, self.metadata, _ATTR_MAP)
        for lon, lat, iml in data:
            node = et.SubElement(hazard_map, 'node')
            node.set('lon', str(lon))
            node.set('lat', str(lat))
            node.set('iml', str(iml))
        nrml.write(list(root), fh)
Serialize hazard map data to XML .
44,173
def serialize(self, data):
    """
    Write a sequence of uniform hazard spectra to the specified file.
    """
    gml_ns = nrml.SERIALIZE_NS_MAP['gml']
    with open(self.dest, 'wb') as fh:
        root = et.Element('nrml')
        uh_spectra = et.SubElement(root, 'uniformHazardSpectra')
        _set_metadata(uh_spectra, self.metadata, _ATTR_MAP)
        periods_elem = et.SubElement(uh_spectra, 'periods')
        periods_elem.text = ' '.join(
            [str(x) for x in self.metadata['periods']])
        for uhs in data:
            # one uhs element per site, with a gml:Point location
            uhs_elem = et.SubElement(uh_spectra, 'uhs')
            gml_point = et.SubElement(uhs_elem, '{%s}Point' % gml_ns)
            gml_pos = et.SubElement(gml_point, '{%s}pos' % gml_ns)
            gml_pos.text = '%s %s' % (uhs.location.x, uhs.location.y)
            imls_elem = et.SubElement(uhs_elem, 'IMLs')
            imls_elem.text = ' '.join(['%10.7E' % x for x in uhs.imls])
        nrml.write(list(root), fh)
Write a sequence of uniform hazard spectra to the specified file .
44,174
def check_config(config, data):
    """
    Check config file inputs for the KijkoSellevolBayes estimator and
    supply defaults for missing optional parameters.

    :param dict config: configuration with keys 'input_mmin',
        'b-value', 'sigma-b' (required), 'tolerance' and
        'maximum_iterations' (optional, defaulted here)
    :param data: catalogue data with a 'magnitude' array
    :returns: the validated (and possibly updated) config
    :raises ValueError: if a required key is missing or sigma-b is
        not greater than zero
    """
    essential_keys = ['input_mmin', 'b-value', 'sigma-b']
    for key in essential_keys:
        if key not in config:
            raise ValueError('For KijkoSellevolBayes the key %s needs to '
                             'be set in the configuation' % key)
    # default numerical controls when missing or falsy
    if not config.get('tolerance'):
        config['tolerance'] = 1E-5
    if not config.get('maximum_iterations'):
        config['maximum_iterations'] = 1000
    # the minimum magnitude cannot be below the observed minimum
    if config['input_mmin'] < np.min(data['magnitude']):
        config['input_mmin'] = np.min(data['magnitude'])
    # BUG FIX: the original wrote fabs(config['sigma-b'] < 1E-15),
    # applying fabs to a boolean; the intended (and effectively
    # equivalent) check is the direct comparison below
    if config['sigma-b'] < 1E-15:
        raise ValueError('Sigma-b must be greater than zero!')
    return config
Check config file inputs
44,175
def _compute_mean(self, C, mag, rjb):
    """
    Compute ground motion mean value.
    """
    # effective distance clipped with a finite-fault-corrected
    # pseudo-depth term
    ffc = self._compute_finite_fault_correction(mag)
    d = np.sqrt(rjb ** 2 + (C['c7'] ** 2) * (ffc ** 2))
    mean = (C['c1'] + C['c2'] * (mag - 6.) +
            C['c3'] * ((mag - 6.) ** 2) -
            C['c4'] * np.log(d) - C['c6'] * d)
    # extra attenuation correction applied beyond 100 km
    factor = np.log(rjb / 100.)
    idx = factor > 0
    mean[idx] -= (C['c5'] - C['c4']) * factor[idx]
    return mean
Compute ground motion mean value .
44,176
def _compute_finite_fault_correction(self, mag):
    """
    Compute finite fault correction term as geometric mean of
    correction terms obtained from Mw values calculated with
    Johnston 1996 and Atkinson and Boore 1987 conversion equations.
    """
    mw_j96 = mblg_to_mw_johnston_96(mag)
    mw_ab87 = mblg_to_mw_atkinson_boore_87(mag)
    t1 = np.exp(-1.25 + 0.227 * mw_j96)
    t2 = np.exp(-1.25 + 0.227 * mw_ab87)
    # geometric mean of the two correction terms
    return np.sqrt(t1 * t2)
Compute finite fault correction term as geometric mean of correction terms obtained from Mw values calculated with Johnston 1996 and Atkinson and Boore 1987 conversion equations .
44,177
def get_vulnerability_functions_04(fname):
    """
    Parse the vulnerability model in NRML 0.4 format.

    :param fname: path of the vulnerability file
    :returns: a pair (dict (imt, taxonomy) -> VulnerabilityFunction,
        dict of categories)
    :raises InvalidFile: on duplicated taxonomies or length mismatches
    """
    categories = dict(assetCategory=set(), lossCategory=set(),
                      vulnerabilitySetID=set())
    imts = set()
    taxonomies = set()
    vf_dict = {}
    for vset in nrml.read(fname).vulnerabilityModel:
        categories['assetCategory'].add(vset['assetCategory'])
        categories['lossCategory'].add(vset['lossCategory'])
        categories['vulnerabilitySetID'].add(vset['vulnerabilitySetID'])
        IML = vset.IML
        imt_str = IML['IMT']
        # ~node extracts the parsed value of the node
        imls = ~IML
        imts.add(imt_str)
        for vfun in vset.getnodes('discreteVulnerability'):
            taxonomy = vfun['vulnerabilityFunctionID']
            if taxonomy in taxonomies:
                raise InvalidFile(
                    'Duplicated vulnerabilityFunctionID: %s: %s, line %d' %
                    (taxonomy, fname, vfun.lineno))
            taxonomies.add(taxonomy)
            with context(fname, vfun):
                loss_ratios = ~vfun.lossRatio
                coefficients = ~vfun.coefficientsVariation
            # loss ratios and coefficients must match the IMLs 1:1
            if len(loss_ratios) != len(imls):
                raise InvalidFile(
                    'There are %d loss ratios, but %d imls: %s, line %d' %
                    (len(loss_ratios), len(imls), fname,
                     vfun.lossRatio.lineno))
            if len(coefficients) != len(imls):
                raise InvalidFile(
                    'There are %d coefficients, but %d imls: %s, line %d' %
                    (len(coefficients), len(imls), fname,
                     vfun.coefficientsVariation.lineno))
            with context(fname, vfun):
                vf_dict[imt_str, taxonomy] = scientific.VulnerabilityFunction(
                    taxonomy, imt_str, imls, loss_ratios, coefficients,
                    vfun['probabilisticDistribution'])
    # build a synthetic id from the vulnerability set IDs
    categories['id'] = '_'.join(sorted(categories['vulnerabilitySetID']))
    del categories['vulnerabilitySetID']
    return vf_dict, categories
Parse the vulnerability model in NRML 0 . 4 format .
44,178
def upgrade_file(path, multipoint):
    """
    Upgrade to the latest NRML version; the original file is kept
    as a .bak copy.

    :param path: path of the NRML file to upgrade in place
    :param multipoint: if True, collapse point sources into
        multipoint sources for source models
    """
    node0 = nrml.read(path, chatty=False)[0]
    shutil.copy(path, path + '.bak')  # keep a backup of the original
    tag = striptag(node0.tag)
    gml = True
    if tag == 'vulnerabilityModel':
        # convert NRML 0.4 vulnerability into the 0.5 representation
        vf_dict, cat_dict = get_vulnerability_functions_04(path)
        node0 = Node(
            'vulnerabilityModel', cat_dict,
            nodes=[obj_to_node(val) for val in vf_dict.values()])
        gml = False
    elif tag == 'fragilityModel':
        node0 = read_nrml.convert_fragility_model_04(
            nrml.read(path)[0], path)
        gml = False
    elif tag == 'sourceModel':
        node0 = nrml.read(path)[0]
        # group the sources by tectonic region type
        dic = groupby(node0.nodes, operator.itemgetter('tectonicRegion'))
        node0.nodes = [
            Node('sourceGroup',
                 dict(tectonicRegion=trt, name="group %s" % i),
                 nodes=srcs)
            for i, (trt, srcs) in enumerate(dic.items(), 1)]
        if multipoint:
            sourceconverter.update_source_model(node0, path + '.bak')
    with open(path, 'wb') as f:
        nrml.write([node0], f, gml=gml)
Upgrade to the latest NRML version
44,179
def _compute_term_3 ( self , C , rrup , mag ) : return ( C [ 'a3' ] * np . log10 ( rrup + C [ 'a4' ] * np . power ( 10 , C [ 'a5' ] * mag ) ) )
This computes the third term in equation 2, page 2.
44,180
def mag_scale_rel_to_hazardlib(mag_scale_rel, use_default=False):
    """
    Returns the magnitude scaling relation in a format readable by
    openquake.hazardlib.
    """
    if isinstance(mag_scale_rel, BaseMSR):
        # already an hazardlib instance
        return mag_scale_rel
    elif isinstance(mag_scale_rel, str):
        if not mag_scale_rel in SCALE_RELS.keys():
            raise ValueError('Magnitude scaling relation %s not supported!'
                             % mag_scale_rel)
        else:
            # instantiate the relation from its registered name
            return SCALE_RELS[mag_scale_rel]()
    else:
        if use_default:
            # default to Wells & Coppersmith (1994)
            return WC1994()
        else:
            raise ValueError('Magnitude Scaling Relation Not Defined!')
Returns the magnitude scaling relation in a format readable by openquake . hazardlib
44,181
def npd_to_pmf(nodal_plane_dist, use_default=False):
    """
    Returns the nodal plane distribution as an instance of the PMF
    class.
    """
    if isinstance(nodal_plane_dist, PMF):
        # already in the correct format
        return nodal_plane_dist
    else:
        if use_default:
            # default: a single vertical strike-slip nodal plane
            return PMF([(1.0, NodalPlane(0.0, 90.0, 0.0))])
        else:
            raise ValueError('Nodal Plane distribution not defined')
Returns the nodal plane distribution as an instance of the PMF class
44,182
def run_job(job_ini, log_level='info', log_file=None, exports='',
            username=getpass.getuser(), **kw):
    """
    Run a job using the specified config file and other options.

    :returns: the job ID
    """
    # NOTE(review): the username default is evaluated once, at import
    # time, not per call
    job_id = logs.init('job', getattr(logging, log_level.upper()))
    with logs.handle(job_id, log_level, log_file):
        job_ini = os.path.abspath(job_ini)
        oqparam = eng.job_from_file(job_ini, job_id, username, **kw)
        kw['username'] = username
        eng.run_calc(job_id, oqparam, exports, **kw)
        for line in logs.dbcmd('list_outputs', job_id, False):
            safeprint(line)
    return job_id
Run a job using the specified config file and other options .
44,183
def run_tile(job_ini, sites_slice):
    """
    Used in tiling calculations: run a job restricted to the sites
    in the given slice.
    """
    return run_job(job_ini,
                   sites_slice=(sites_slice.start, sites_slice.stop))
Used in tiling calculations
44,184
def del_calculation(job_id, confirmed=False):
    """
    Delete a calculation and all associated outputs.

    :param job_id: ID of the calculation to remove
    :param confirmed: if True, skip the interactive confirmation
    """
    if logs.dbcmd('get_job', job_id) is None:
        print('There is no job %d' % job_id)
        return
    if confirmed or confirm(
            'Are you sure you want to (abort and) delete this calculation and '
            'all associated outputs?\nThis action cannot be undone. (y/n): '):
        try:
            abort(job_id)
            resp = logs.dbcmd('del_calc', job_id, getpass.getuser())
        except RuntimeError as err:
            safeprint(err)
        else:
            if 'success' in resp:
                print('Removed %d' % job_id)
            else:
                print(resp['error'])
Delete a calculation and all associated outputs .
44,185
def smart_run(job_ini, oqparam, log_level, log_file, exports,
              reuse_hazard):
    """
    Run calculations by storing their hazard checksum and reusing
    previous calculations if requested.
    """
    haz_checksum = readinput.get_checksum32(oqparam, hazard=True)
    # retrieve an old calculation with the right checksum, if any
    job = logs.dbcmd('get_job_from_checksum', haz_checksum)
    reuse = reuse_hazard and job and os.path.exists(
        job.ds_calc_dir + '.hdf5')
    # event based risk without precomputed gmfs: hazard is run first
    ebr = (oqparam.calculation_mode == 'event_based_risk' and
           'gmfs' not in oqparam.inputs)
    if ebr:
        kw = dict(calculation_mode='event_based')
        if (oqparam.sites or 'sites' in oqparam.inputs or
                'site_model' in oqparam.inputs):
            # remove the exposure from the hazard part of the job
            kw['exposure_file'] = ''
    else:
        kw = {}
    if not reuse:
        hc_id = run_job(job_ini, log_level, log_file, exports, **kw)
        if job is None:
            logs.dbcmd('add_checksum', hc_id, haz_checksum)
        elif not reuse_hazard or not os.path.exists(
                job.ds_calc_dir + '.hdf5'):
            logs.dbcmd('update_job_checksum', hc_id, haz_checksum)
        if ebr:
            run_job(job_ini, log_level, log_file, exports,
                    hazard_calculation_id=hc_id)
    else:
        hc_id = job.id
        logging.info('Reusing job #%d', job.id)
        run_job(job_ini, log_level, log_file, exports,
                hazard_calculation_id=hc_id)
Run calculations by storing their hazard checksum and reusing previous calculations if requested .
44,186
def _get_stddevs(self, C, sites, pga1100, sigma_pga, stddev_types):
    """
    Returns the standard deviations as described in the ALEATORY
    UNCERTAINTY MODEL section of the paper. Equations 13 to 19,
    pages 147 to 151.
    """
    std_intra = self._compute_intra_event_std(C, sites.vs30, pga1100,
                                              sigma_pga)
    # inter-event term is constant across sites
    std_inter = C['t_lny'] * np.ones_like(sites.vs30)
    stddevs = []
    for stddev_type in stddev_types:
        assert stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES
        if stddev_type == const.StdDev.TOTAL:
            stddevs.append(self._get_total_sigma(C, std_intra, std_inter))
        elif stddev_type == const.StdDev.INTRA_EVENT:
            stddevs.append(std_intra)
        elif stddev_type == const.StdDev.INTER_EVENT:
            stddevs.append(std_inter)
    return stddevs
Returns the standard deviations as described in the ALEATORY UNCERTAINTY MODEL section of the paper . Equations 13 to 19 pages 147 to 151
44,187
def _compute_intra_event_std(self, C, vs30, pga1100, sigma_pga):
    """
    Returns the intra-event standard deviation at the site, as defined
    in equation 15, page 147.
    """
    # variabilities with the site-amplification part stripped out
    sig_lnyb = np.sqrt(C['s_lny'] ** 2. - C['s_lnAF'] ** 2.)
    sig_lnab = np.sqrt(sigma_pga ** 2. - C['s_lnAF'] ** 2.)
    alpha = self._compute_intra_event_alpha(C, vs30, pga1100)
    return np.sqrt(
        (sig_lnyb ** 2.) + (C['s_lnAF'] ** 2.) +
        ((alpha ** 2.) * (sig_lnab ** 2.)) +
        (2.0 * alpha * C['rho'] * sig_lnyb * sig_lnab))
Returns the intra - event standard deviation at the site as defined in equation 15 page 147
44,188
def _compute_intra_event_alpha ( self , C , vs30 , pga1100 ) : alpha = np . zeros_like ( vs30 , dtype = float ) idx = vs30 < C [ 'k1' ] if np . any ( idx ) : temp1 = ( pga1100 [ idx ] + C [ 'c' ] * ( vs30 [ idx ] / C [ 'k1' ] ) ** C [ 'n' ] ) ** - 1. temp1 = temp1 - ( ( pga1100 [ idx ] + C [ 'c' ] ) ** - 1. ) alpha [ idx ] = C [ 'k2' ] * pga1100 [ idx ] * temp1 return alpha
Returns the linearised functional relationship between fsite and pga1100 determined from the partial derivative defined on equation 17 on page 148
44,189
def _get_total_sigma ( self , C , std_intra , std_inter ) : return np . sqrt ( std_intra ** 2. + std_inter ** 2. + C [ 'c_lny' ] ** 2. )
Returns the total sigma term for the arbitrary horizontal component of ground motion defined by equation 18 page 150
44,190
def generate_event_set(ucerf, background_sids, src_filter, ses_idx, seed):
    """
    Generates the event set corresponding to a particular branch.

    :returns: a pair (ruptures, rupture_occ) with the sampled
        ruptures and their number of occurrences
    """
    # serial numbers are offset per stochastic event set
    serial = seed + ses_idx * TWO16
    with h5py.File(ucerf.source_file, 'r') as hdf5:
        occurrences = ucerf.tom.sample_number_of_occurrences(
            ucerf.rate, seed)
        indices, = numpy.where(occurrences)
        logging.debug(
            'Considering "%s", %d ruptures', ucerf.source_id,
            len(indices))
        # build the UCERF ruptures that actually occurred
        ruptures = []
        rupture_occ = []
        for iloc, n_occ in zip(indices, occurrences[indices]):
            ucerf_rup = ucerf.get_ucerf_rupture(iloc, src_filter)
            if ucerf_rup:
                ucerf_rup.serial = serial
                serial += 1
                ruptures.append(ucerf_rup)
                rupture_occ.append(n_occ)
        # sample the background seismicity and append its ruptures
        background_ruptures, background_n_occ = sample_background_model(
            hdf5, ucerf.idx_set["grid_key"], ucerf.tom, seed,
            background_sids, ucerf.min_mag, ucerf.npd, ucerf.hdd,
            ucerf.usd, ucerf.lsd, ucerf.msr, ucerf.aspect,
            ucerf.tectonic_region_type)
        for i, brup in enumerate(background_ruptures):
            brup.serial = serial
            serial += 1
            ruptures.append(brup)
        rupture_occ.extend(background_n_occ)
    # serial numbers must stay within the per-SES offset window
    assert len(ruptures) < TWO16, len(ruptures)
    return ruptures, rupture_occ
Generates the event set corresponding to a particular branch
44,191
def sample_background_model(
        hdf5, branch_key, tom, seed, filter_idx, min_mag, npd, hdd,
        upper_seismogenic_depth, lower_seismogenic_depth, msr=WC1994(),
        aspect=1.5, trt=DEFAULT_TRT):
    """
    Generates a rupture set from a sample of the background model.

    NOTE(review): the ``msr=WC1994()`` default is evaluated once at
    import time and shared across calls; harmless only if WC1994
    instances are stateless -- confirm.
    """
    bg_magnitudes = hdf5["/".join(["Grid", branch_key, "Magnitude"])].value
    # keep only magnitudes at or above the minimum magnitude
    mag_idx = bg_magnitudes >= min_mag
    mags = bg_magnitudes[mag_idx]
    rates = hdf5["/".join(["Grid", branch_key, "RateArray"])][filter_idx, :]
    rates = rates[:, mag_idx]
    valid_locs = hdf5["Grid/Locations"][filter_idx, :]
    # sample the number of occurrences for each (location, magnitude)
    sampler = tom.sample_number_of_occurrences(rates, seed)
    background_ruptures = []
    background_n_occ = []
    for i, mag in enumerate(mags):
        rate_idx = numpy.where(sampler[:, i])[0]
        rate_cnt = sampler[rate_idx, i]
        occurrence = rates[rate_idx, i]
        locations = valid_locs[rate_idx, :]
        ruptures = generate_background_ruptures(
            tom, locations, occurrence, mag, npd, hdd,
            upper_seismogenic_depth, lower_seismogenic_depth, msr,
            aspect, trt)
        background_ruptures.extend(ruptures)
        background_n_occ.extend(rate_cnt.tolist())
    return background_ruptures, background_n_occ
Generates a rupture set from a sample of the background model
44,192
def get_median_area(self, mag, rake):
    """
    The values are a function of both magnitude and rake; the rake
    selects the faulting-style coefficients.
    """
    assert rake is None or -180 <= rake <= 180
    if rake is None:
        # unknown faulting style: use the "all" coefficients
        a, b = -3.49, 0.91
    elif (-45 <= rake <= 45) or abs(rake) >= 135:
        # strike-slip
        a, b = -3.42, 0.90
    elif rake > 0:
        # reverse/thrust
        a, b = -3.99, 0.98
    else:
        # normal
        a, b = -2.87, 0.82
    return 10.0 ** (a + b * mag)
The values are a function of both magnitude and rake .
44,193
def get_std_dev_area(self, mag, rake):
    """
    Standard deviation for WC1994. Magnitude is ignored.
    """
    assert rake is None or -180 <= rake <= 180
    if rake is None:
        return 0.24
    if (-45 <= rake <= 45) or abs(rake) >= 135:
        return 0.22  # strike-slip
    if rake > 0:
        return 0.26  # reverse
    return 0.22  # normal
Standard deviation for WC1994 . Magnitude is ignored .
44,194
def get_std_dev_mag(self, rake):
    """
    Standard deviation on the magnitude for the WC1994 area relation.
    """
    assert rake is None or -180 <= rake <= 180
    if rake is None:
        return 0.24
    if (-45 <= rake <= 45) or abs(rake) >= 135:
        return 0.23  # strike-slip
    # dip-slip (both reverse and normal share the same value)
    return 0.25
Standard deviation on the magnitude for the WC1994 area relation .
44,195
def set_parameters(self):
    """
    Combines the parameters of the GMPE provided at the construction
    level with the ones assigned to the average GMPE.
    """
    for name in dir(self):
        # copy every REQUIRES_* attribute and every DEFINED_*
        # attribute except the intensity measure types
        copy_it = name.startswith('REQUIRES_') or (
            name.startswith('DEFINED_') and
            not name.endswith('FOR_INTENSITY_MEASURE_TYPES'))
        if copy_it:
            setattr(self, name, getattr(self.gmpe, name))
Combines the parameters of the GMPE provided at the construction level with the ones assigned to the average GMPE .
44,196
def from_points_list(cls, points):
    """
    Create a mesh object from a collection of points.
    """
    npoints = len(points)
    lons = numpy.zeros(npoints, dtype=float)
    lats = lons.copy()
    depths = lons.copy()
    for idx, point in enumerate(points):
        lons[idx] = point.longitude
        lats[idx] = point.latitude
        depths[idx] = point.depth
    if not depths.any():
        # all depths are zero: store no depths at all
        depths = None
    return cls(lons, lats, depths)
Create a mesh object from a collection of points .
44,197
def get_min_distance(self, mesh):
    """
    Compute and return the minimum distance from this mesh to each
    point in another mesh.
    """
    # distance matrix has shape (len(self), len(mesh));
    # minimizing over axis 0 gives one value per point of ``mesh``
    dist_matrix = cdist(self.xyz, mesh.xyz)
    return dist_matrix.min(axis=0)
Compute and return the minimum distance from the mesh to each point in another mesh .
44,198
def get_closest_points(self, mesh):
    """
    Find the closest point of this mesh for each point in the other
    mesh; returns a new Mesh built from those points.
    """
    min_idx = cdist(self.xyz, mesh.xyz).argmin(axis=0)
    if hasattr(mesh, 'shape'):
        # preserve the 2d shape of grid-like meshes
        min_idx = min_idx.reshape(mesh.shape)
    lons = self.lons.take(min_idx)
    lats = self.lats.take(min_idx)
    deps = self.depths.take(min_idx)
    return Mesh(lons, lats, deps)
Find closest point of this mesh for each point in the other mesh
44,199
def get_distance_matrix(self):
    """
    Compute and return distances between each pair of points in the
    mesh, as an NxN matrix of geodetic distances.
    """
    assert self.lons.ndim == 1
    # broadcast (N, 1) against (N,) to obtain the full N x N matrix
    distances = geodetic.geodetic_distance(
        self.lons.reshape(self.lons.shape + (1, )),
        self.lats.reshape(self.lats.shape + (1, )),
        self.lons, self.lats)
    # NOTE(review): numpy.matrix is deprecated in recent numpy
    # releases; kept here since callers may rely on matrix semantics
    return numpy.matrix(distances, copy=False)
Compute and return distances between each pairs of points in the mesh .