| idx (int64, 0–63k) | question (string, 61–4.03k chars) | target (string, 6–1.23k chars) |
|---|---|---|
44,300 | def convert ( self , imtls , nsites , idx = 0 ) : curves = numpy . zeros ( nsites , imtls . dt ) for imt in curves . dtype . names : curves_by_imt = curves [ imt ] for sid in self : curves_by_imt [ sid ] = self [ sid ] . array [ imtls ( imt ) , idx ] return curves | Convert a probability map into a composite array of length nsites and dtype imtls . dt . |
44,301 | def filter ( self , sids ) : dic = self . __class__ ( self . shape_y , self . shape_z ) for sid in sids : try : dic [ sid ] = self [ sid ] except KeyError : pass return dic | Extracts a submap of self for the given sids . |
44,302 | def extract ( self , inner_idx ) : out = self . __class__ ( self . shape_y , 1 ) for sid in self : curve = self [ sid ] array = curve . array [ : , inner_idx ] . reshape ( - 1 , 1 ) out [ sid ] = ProbabilityCurve ( array ) return out | Extracts a component of the underlying ProbabilityCurves specified by the index inner_idx . |
44,303 | def compare ( what , imt , calc_ids , files , samplesites = 100 , rtol = .1 , atol = 1E-4 ) : sids , imtls , poes , arrays = getdata ( what , calc_ids , samplesites ) try : levels = imtls [ imt ] except KeyError : sys . exit ( '%s not found. The available IMTs are %s' % ( imt , list ( imtls ) ) ) imt2idx = { imt : i for i , imt in enumerate ( imtls ) } head = [ 'site_id' ] if files else [ 'site_id' , 'calc_id' ] if what == 'hcurves' : array_imt = arrays [ : , : , imtls ( imt ) ] header = head + [ '%.5f' % lvl for lvl in levels ] else : array_imt = arrays [ : , : , imt2idx [ imt ] ] header = head + [ str ( poe ) for poe in poes ] rows = collections . defaultdict ( list ) diff_idxs = get_diff_idxs ( array_imt , rtol , atol ) if len ( diff_idxs ) == 0 : print ( 'There are no differences within the tolerance of %d%%' % ( rtol * 100 ) ) return arr = array_imt . transpose ( 1 , 0 , 2 ) for sid , array in sorted ( zip ( sids [ diff_idxs ] , arr [ diff_idxs ] ) ) : for calc_id , cols in zip ( calc_ids , array ) : if files : rows [ calc_id ] . append ( [ sid ] + list ( cols ) ) else : rows [ 'all' ] . append ( [ sid , calc_id ] + list ( cols ) ) if files : fdict = { calc_id : open ( '%s.txt' % calc_id , 'w' ) for calc_id in calc_ids } for calc_id , f in fdict . items ( ) : f . write ( views . rst_table ( rows [ calc_id ] , header ) ) print ( 'Generated %s' % f . name ) else : print ( views . rst_table ( rows [ 'all' ] , header ) ) | Compare the hazard curves or maps of two or more calculations |
44,304 | def build_filename ( filename , filetype = 'png' , resolution = 300 ) : filevals = os . path . splitext ( filename ) if filevals [ 1 ] : filetype = filevals [ 1 ] [ 1 : ] if not filetype : filetype = 'png' filename = filevals [ 0 ] + '.' + filetype if not resolution : resolution = 300 return filename , filetype , resolution | Uses the input properties to build the filename string |
44,305 | def _get_catalogue_bin_limits ( catalogue , dmag ) : mag_bins = np . arange ( float ( np . floor ( np . min ( catalogue . data [ 'magnitude' ] ) ) ) - dmag , float ( np . ceil ( np . max ( catalogue . data [ 'magnitude' ] ) ) ) + dmag , dmag ) counter = np . histogram ( catalogue . data [ 'magnitude' ] , mag_bins ) [ 0 ] idx = np . where ( counter > 0 ) [ 0 ] mag_bins = mag_bins [ idx [ 0 ] : ( idx [ - 1 ] + 2 ) ] return mag_bins | Returns the magnitude bins corresponding to the catalogue |
44,306 | def plot_depth_histogram ( catalogue , bin_width , normalisation = False , bootstrap = None , filename = None , figure_size = ( 8 , 6 ) , filetype = 'png' , dpi = 300 , ax = None ) : if ax is None : fig , ax = plt . subplots ( figsize = figure_size ) else : fig = ax . get_figure ( ) if len ( catalogue . data [ 'depth' ] ) == 0 : raise ValueError ( 'No depths reported in catalogue!' ) depth_bins = np . arange ( 0. , np . max ( catalogue . data [ 'depth' ] ) + bin_width , bin_width ) depth_hist = catalogue . get_depth_distribution ( depth_bins , normalisation , bootstrap ) ax . bar ( depth_bins [ : - 1 ] , depth_hist , width = 0.95 * bin_width , edgecolor = 'k' ) ax . set_xlabel ( 'Depth (km)' ) if normalisation : ax . set_ylabel ( 'Probability Mass Function' ) else : ax . set_ylabel ( 'Count' ) ax . set_title ( 'Depth Histogram' ) _save_image ( fig , filename , filetype , dpi ) | Creates a histogram of the depths in the catalogue |
44,307 | def plot_magnitude_depth_density ( catalogue , mag_int , depth_int , logscale = False , normalisation = False , bootstrap = None , filename = None , figure_size = ( 8 , 6 ) , filetype = 'png' , dpi = 300 , ax = None ) : if len ( catalogue . data [ 'depth' ] ) == 0 : raise ValueError ( 'No depths reported in catalogue!' ) depth_bins = np . arange ( 0. , np . max ( catalogue . data [ 'depth' ] ) + depth_int , depth_int ) mag_bins = _get_catalogue_bin_limits ( catalogue , mag_int ) mag_depth_dist = catalogue . get_magnitude_depth_distribution ( mag_bins , depth_bins , normalisation , bootstrap ) vmin_val = np . min ( mag_depth_dist [ mag_depth_dist > 0. ] ) if ax is None : fig , ax = plt . subplots ( figsize = figure_size ) else : fig = ax . get_figure ( ) if logscale : normaliser = LogNorm ( vmin = vmin_val , vmax = np . max ( mag_depth_dist ) ) else : normaliser = Normalize ( vmin = 0 , vmax = np . max ( mag_depth_dist ) ) im = ax . pcolor ( mag_bins [ : - 1 ] , depth_bins [ : - 1 ] , mag_depth_dist . T , norm = normaliser ) ax . set_xlabel ( 'Magnitude' ) ax . set_ylabel ( 'Depth (km)' ) ax . set_xlim ( mag_bins [ 0 ] , mag_bins [ - 1 ] ) ax . set_ylim ( depth_bins [ 0 ] , depth_bins [ - 1 ] ) fig . colorbar ( im , ax = ax ) if normalisation : ax . set_title ( 'Magnitude-Depth Density' ) else : ax . set_title ( 'Magnitude-Depth Count' ) _save_image ( fig , filename , filetype , dpi ) | Creates a density plot of the magnitude and depth distribution |
44,308 | def plot_magnitude_time_scatter ( catalogue , plot_error = False , fmt_string = 'o' , filename = None , figure_size = ( 8 , 6 ) , filetype = 'png' , dpi = 300 , ax = None ) : if ax is None : fig , ax = plt . subplots ( figsize = figure_size ) else : fig = ax . get_figure ( ) dtime = catalogue . get_decimal_time ( ) if len ( catalogue . data [ 'sigmaMagnitude' ] ) == 0 : print ( 'Magnitude Error is missing - neglecting error bars!' ) plot_error = False if plot_error : ax . errorbar ( dtime , catalogue . data [ 'magnitude' ] , xerr = None , yerr = catalogue . data [ 'sigmaMagnitude' ] , fmt = fmt_string ) else : ax . plot ( dtime , catalogue . data [ 'magnitude' ] , fmt_string ) ax . set_xlabel ( 'Year' ) ax . set_ylabel ( 'Magnitude' ) ax . set_title ( 'Magnitude-Time Plot' ) _save_image ( fig , filename , filetype , dpi ) | Creates a simple scatter plot of magnitude with time |
44,309 | def plot_magnitude_time_density ( catalogue , mag_int , time_int , completeness = None , normalisation = False , logscale = True , bootstrap = None , xlim = [ ] , ylim = [ ] , filename = None , figure_size = ( 8 , 6 ) , filetype = 'png' , dpi = 300 , ax = None ) : if ax is None : fig , ax = plt . subplots ( figsize = figure_size ) else : fig = ax . get_figure ( ) if isinstance ( mag_int , ( np . ndarray , list ) ) : mag_bins = mag_int else : mag_bins = np . arange ( np . min ( catalogue . data [ 'magnitude' ] ) , np . max ( catalogue . data [ 'magnitude' ] ) + mag_int / 2. , mag_int ) if isinstance ( time_int , ( np . ndarray , list ) ) : time_bins = time_int else : time_bins = np . arange ( float ( np . min ( catalogue . data [ 'year' ] ) ) , float ( np . max ( catalogue . data [ 'year' ] ) ) + 1. , float ( time_int ) ) mag_time_dist = catalogue . get_magnitude_time_distribution ( mag_bins , time_bins , normalisation , bootstrap ) vmin_val = np . min ( mag_time_dist [ mag_time_dist > 0. ] ) if logscale : norm_data = LogNorm ( vmin = vmin_val , vmax = np . max ( mag_time_dist ) ) else : if normalisation : norm_data = Normalize ( vmin = vmin_val , vmax = np . max ( mag_time_dist ) ) else : norm_data = Normalize ( vmin = 1.0 , vmax = np . max ( mag_time_dist ) ) im = ax . pcolor ( time_bins [ : - 1 ] , mag_bins [ : - 1 ] , mag_time_dist . T , norm = norm_data ) ax . set_xlabel ( 'Time (year)' ) ax . set_ylabel ( 'Magnitude' ) if len ( xlim ) == 2 : ax . set_xlim ( xlim [ 0 ] , xlim [ 1 ] ) else : ax . set_xlim ( time_bins [ 0 ] , time_bins [ - 1 ] ) if len ( ylim ) == 2 : ax . set_ylim ( ylim [ 0 ] , ylim [ 1 ] ) else : ax . set_ylim ( mag_bins [ 0 ] , mag_bins [ - 1 ] + ( mag_bins [ - 1 ] - mag_bins [ - 2 ] ) ) if normalisation : fig . colorbar ( im , label = 'Event Density' , shrink = 0.9 , ax = ax ) else : fig . colorbar ( im , label = 'Event Count' , shrink = 0.9 , ax = ax ) ax . grid ( True ) if completeness is not None : _plot_completeness ( ax , completeness , time_bins [ 0 ] , time_bins [ - 1 ] ) _save_image ( fig , filename , filetype , dpi ) | Creates a plot of magnitude - time density |
44,310 | def _plot_completeness ( ax , comw , start_time , end_time ) : comw = np . array ( comw ) comp = np . column_stack ( [ np . hstack ( [ end_time , comw [ : , 0 ] , start_time ] ) , np . hstack ( [ comw [ 0 , 1 ] , comw [ : , 1 ] , comw [ - 1 , 1 ] ] ) ] ) ax . step ( comp [ : - 1 , 0 ] , comp [ 1 : , 1 ] , linestyle = '-' , where = "post" , linewidth = 3 , color = 'brown' ) | Adds completeness intervals to a plot |
44,311 | def get_completeness_adjusted_table ( catalogue , completeness , dmag , offset = 1.0E-5 , end_year = None , plot = False , figure_size = ( 8 , 6 ) , filename = None , filetype = 'png' , dpi = 300 , ax = None ) : if not end_year : end_year = catalogue . end_year mag_bins = _get_catalogue_bin_limits ( catalogue , dmag ) obs_time = end_year - completeness [ : , 0 ] + 1. obs_rates = np . zeros_like ( mag_bins ) durations = np . zeros_like ( mag_bins ) n_comp = np . shape ( completeness ) [ 0 ] for iloc in range ( n_comp ) : low_mag = completeness [ iloc , 1 ] comp_year = completeness [ iloc , 0 ] if iloc == ( n_comp - 1 ) : idx = np . logical_and ( catalogue . data [ 'magnitude' ] >= low_mag - offset , catalogue . data [ 'year' ] >= comp_year ) high_mag = mag_bins [ - 1 ] obs_idx = mag_bins >= ( low_mag - offset ) else : high_mag = completeness [ iloc + 1 , 1 ] mag_idx = np . logical_and ( catalogue . data [ 'magnitude' ] >= low_mag - offset , catalogue . data [ 'magnitude' ] < ( high_mag - offset ) ) idx = np . logical_and ( mag_idx , catalogue . data [ 'year' ] >= ( comp_year - offset ) ) obs_idx = np . logical_and ( mag_bins >= ( low_mag - offset ) , mag_bins < ( high_mag + offset ) ) temp_rates = np . histogram ( catalogue . data [ 'magnitude' ] [ idx ] , mag_bins [ obs_idx ] ) [ 0 ] temp_rates = temp_rates . astype ( float ) / obs_time [ iloc ] obs_rates [ obs_idx [ : - 1 ] ] = temp_rates durations [ obs_idx [ : - 1 ] ] = obs_time [ iloc ] selector = np . where ( obs_rates > 0. ) [ 0 ] mag_bins = mag_bins [ selector ] obs_rates = obs_rates [ selector ] durations = durations [ selector ] cum_rates = np . array ( [ sum ( obs_rates [ iloc : ] ) for iloc in range ( 0 , len ( obs_rates ) ) ] ) if plot : plt . figure ( figsize = figure_size ) plt . semilogy ( mag_bins + dmag / 2. , obs_rates , "bo" , label = "Incremental" ) plt . semilogy ( mag_bins + dmag / 2. , cum_rates , "rs" , label = "Cumulative" ) plt . xlabel ( "Magnitude (M)" , fontsize = 16 ) plt . ylabel ( "Annual Rate" , fontsize = 16 ) plt . grid ( True ) plt . legend ( fontsize = 16 ) if filename : plt . savefig ( filename , format = filetype , dpi = dpi , bbox_inches = "tight" ) return np . column_stack ( [ mag_bins , durations , obs_rates , cum_rates , np . log10 ( cum_rates ) ] ) | Counts the number of earthquakes in each magnitude bin and normalises the rate to annual rates taking into account the completeness |
44,312 | def plot_observed_recurrence ( catalogue , completeness , dmag , end_year = None , filename = None , figure_size = ( 8 , 6 ) , filetype = 'png' , dpi = 300 , ax = None ) : if isinstance ( completeness , float ) : completeness = np . array ( [ [ np . min ( catalogue . data [ 'year' ] ) , completeness ] ] ) if not end_year : end_year = catalogue . update_end_year ( ) catalogue . data [ "dtime" ] = catalogue . get_decimal_time ( ) cent_mag , t_per , n_obs = get_completeness_counts ( catalogue , completeness , dmag ) obs_rates = n_obs / t_per cum_obs_rates = np . array ( [ np . sum ( obs_rates [ i : ] ) for i in range ( len ( obs_rates ) ) ] ) if ax is None : fig , ax = plt . subplots ( figsize = figure_size ) else : fig = ax . get_figure ( ) ax . semilogy ( cent_mag , obs_rates , 'bo' , label = "Incremental" ) ax . semilogy ( cent_mag , cum_obs_rates , 'rs' , label = "Cumulative" ) ax . set_xlim ( [ cent_mag [ 0 ] - 0.1 , cent_mag [ - 1 ] + 0.1 ] ) ax . set_xlabel ( 'Magnitude' ) ax . set_ylabel ( 'Annual Rate' ) ax . legend ( ) _save_image ( fig , filename , filetype , dpi ) | Plots the observed recurrence taking into account the completeness |
44,313 | def get_number_observations ( self ) : if isinstance ( self . data , dict ) and ( 'exx' in self . data . keys ( ) ) : return len ( self . data [ 'exx' ] ) else : return 0 | Returns the number of observations in the data file |
44,314 | def plot_lc ( calc_id , aid = None ) : dstore = util . read ( calc_id ) dset = dstore [ 'agg_curves-rlzs' ] if aid is None : plt = make_figure ( dset . attrs [ 'return_periods' ] , dset . value ) else : sys . exit ( 'Not implemented yet' ) plt . show ( ) | Plot loss curves given a calculation id and an asset ordinal . |
44,315 | def get_weighted_poes ( gsim , sctx , rctx , dctx , imt , imls , truncation_level , weighting = DEFAULT_WEIGHTING ) : if truncation_level is not None and truncation_level < 0 : raise ValueError ( 'truncation level must be zero, positive number ' 'or None' ) gsim . _check_imt ( imt ) adjustment = nga_west2_epistemic_adjustment ( rctx . mag , dctx . rrup ) adjustment = adjustment . reshape ( adjustment . shape + ( 1 , ) ) if truncation_level == 0 : imls = gsim . to_distribution_values ( imls ) mean , _ = gsim . get_mean_and_stddevs ( sctx , rctx , dctx , imt , [ ] ) mean = mean . reshape ( mean . shape + ( 1 , ) ) output = np . zeros ( [ mean . shape [ 0 ] , imls . shape [ 0 ] ] ) for ( wgt , fct ) in weighting : output += ( wgt * ( imls <= ( mean + ( fct * adjustment ) ) ) . astype ( float ) ) return output else : assert ( const . StdDev . TOTAL in gsim . DEFINED_FOR_STANDARD_DEVIATION_TYPES ) imls = gsim . to_distribution_values ( imls ) mean , [ stddev ] = gsim . get_mean_and_stddevs ( sctx , rctx , dctx , imt , [ const . StdDev . TOTAL ] ) mean = mean . reshape ( mean . shape + ( 1 , ) ) stddev = stddev . reshape ( stddev . shape + ( 1 , ) ) output = np . zeros ( [ mean . shape [ 0 ] , imls . shape [ 0 ] ] ) for ( wgt , fct ) in weighting : values = ( imls - ( mean + ( fct * adjustment ) ) ) / stddev if truncation_level is None : output += ( wgt * _norm_sf ( values ) ) else : output += ( wgt * _truncnorm_sf ( truncation_level , values ) ) return output | This function implements the NGA West 2 GMPE epistemic uncertainty adjustment factor without re - calculating the actual GMPE each time . |
44,316 | def register_fields ( w ) : PARAMS_LIST = [ BASE_PARAMS , GEOMETRY_PARAMS , MFD_PARAMS ] for PARAMS in PARAMS_LIST : for _ , param , dtype in PARAMS : w . field ( param , fieldType = dtype , size = FIELD_SIZE ) PARAMS_LIST = [ RATE_PARAMS , STRIKE_PARAMS , DIP_PARAMS , RAKE_PARAMS , NPW_PARAMS , HDEPTH_PARAMS , HDW_PARAMS , PLANES_STRIKES_PARAM , PLANES_DIPS_PARAM ] for PARAMS in PARAMS_LIST : for param , dtype in PARAMS : w . field ( param , fieldType = dtype , size = FIELD_SIZE ) w . field ( 'sourcetype' , 'C' ) | Register shapefile fields . |
44,317 | def extract_source_params ( src ) : tags = get_taglist ( src ) data = [ ] for key , param , vtype in BASE_PARAMS : if key in src . attrib : if vtype == "c" : data . append ( ( param , src . attrib [ key ] ) ) elif vtype == "f" : data . append ( ( param , float ( src . attrib [ key ] ) ) ) else : data . append ( ( param , None ) ) elif key in tags : if vtype == "c" : data . append ( ( param , src . nodes [ tags . index ( key ) ] . text ) ) elif vtype == "f" : data . append ( ( param , float ( src . nodes [ tags . index ( key ) ] . text ) ) ) else : data . append ( ( param , None ) ) else : data . append ( ( param , None ) ) return dict ( data ) | Extract params from source object . |
44,318 | def parse_complex_fault_geometry ( node ) : assert "complexFaultGeometry" in node . tag geometry = { "intermediateEdges" : [ ] } for subnode in node : crds = subnode . nodes [ 0 ] . nodes [ 0 ] . text if "faultTopEdge" in subnode . tag : geometry [ "faultTopEdge" ] = numpy . array ( [ [ crds [ i ] , crds [ i + 1 ] , crds [ i + 2 ] ] for i in range ( 0 , len ( crds ) , 3 ) ] ) geometry [ "upperSeismoDepth" ] = numpy . min ( geometry [ "faultTopEdge" ] [ : , 2 ] ) elif "faultBottomEdge" in subnode . tag : geometry [ "faultBottomEdge" ] = numpy . array ( [ [ crds [ i ] , crds [ i + 1 ] , crds [ i + 2 ] ] for i in range ( 0 , len ( crds ) , 3 ) ] ) geometry [ "lowerSeismoDepth" ] = numpy . max ( geometry [ "faultBottomEdge" ] [ : , 2 ] ) elif "intermediateEdge" in subnode . tag : geometry [ "intermediateEdges" ] . append ( numpy . array ( [ [ crds [ i ] , crds [ i + 1 ] , crds [ i + 2 ] ] for i in range ( 0 , len ( crds ) , 3 ) ] ) ) else : pass geometry [ "dip" ] = None return geometry | Parses a complex fault geometry node returning both the attributes and parameters in a dictionary |
44,319 | def parse_planar_fault_geometry ( node ) : assert "planarSurface" in node . tag geometry = { "strike" : node . attrib [ "strike" ] , "dip" : node . attrib [ "dip" ] } upper_depth = numpy . inf lower_depth = 0.0 tags = get_taglist ( node ) corner_points = [ ] for locn in [ "topLeft" , "topRight" , "bottomRight" , "bottomLeft" ] : plane = node . nodes [ tags . index ( locn ) ] upper_depth = plane [ "depth" ] if plane [ "depth" ] < upper_depth else upper_depth lower_depth = plane [ "depth" ] if plane [ "depth" ] > lower_depth else lower_depth corner_points . append ( [ plane [ "lon" ] , plane [ "lat" ] , plane [ "depth" ] ] ) geometry [ "upperSeismoDepth" ] = upper_depth geometry [ "lowerSeismoDepth" ] = lower_depth geometry [ "corners" ] = numpy . array ( corner_points ) return geometry | Parses a planar fault geometry node returning both the attributes and parameters in a dictionary |
44,320 | def extract_mfd_params ( src ) : tags = get_taglist ( src ) if "incrementalMFD" in tags : mfd_node = src . nodes [ tags . index ( "incrementalMFD" ) ] elif "truncGutenbergRichterMFD" in tags : mfd_node = src . nodes [ tags . index ( "truncGutenbergRichterMFD" ) ] elif "arbitraryMFD" in tags : mfd_node = src . nodes [ tags . index ( "arbitraryMFD" ) ] elif "YoungsCoppersmithMFD" in tags : mfd_node = src . nodes [ tags . index ( "YoungsCoppersmithMFD" ) ] else : raise ValueError ( "Source %s contains no supported MFD type!" % src . tag ) data = [ ] rates = [ ] for key , param , vtype in MFD_PARAMS : if key in mfd_node . attrib and mfd_node . attrib [ key ] is not None : data . append ( ( param , mfd_node . attrib [ key ] ) ) else : data . append ( ( param , None ) ) if "incrementalMFD" in mfd_node . tag or "arbitraryMFD" in mfd_node . tag : rates = ~ mfd_node . occurRates n_r = len ( rates ) if n_r > MAX_RATES : raise ValueError ( "Number of rates in source %s too large " "to be placed into shapefile" % src . tag ) rate_dict = dict ( [ ( key , rates [ i ] if i < n_r else None ) for i , ( key , _ ) in enumerate ( RATE_PARAMS ) ] ) elif "YoungsCoppersmithMFD" in mfd_node . tag : rate_dict = dict ( [ ( key , mfd_node . attrib [ 'characteristicRate' ] ) for i , ( key , _ ) in enumerate ( RATE_PARAMS ) ] ) else : rate_dict = dict ( [ ( key , None ) for i , ( key , _ ) in enumerate ( RATE_PARAMS ) ] ) return dict ( data ) , rate_dict | Extracts the MFD parameters from an object |
44,321 | def extract_source_hypocentral_depths ( src ) : if "pointSource" not in src . tag and "areaSource" not in src . tag : hds = dict ( [ ( key , None ) for key , _ in HDEPTH_PARAMS ] ) hdsw = dict ( [ ( key , None ) for key , _ in HDW_PARAMS ] ) return hds , hdsw tags = get_taglist ( src ) hdd_nodeset = src . nodes [ tags . index ( "hypoDepthDist" ) ] if len ( hdd_nodeset ) > MAX_HYPO_DEPTHS : raise ValueError ( "Number of hypocentral depths %s exceeds stated " "maximum of %s" % ( str ( len ( hdd_nodeset ) ) , str ( MAX_HYPO_DEPTHS ) ) ) if len ( hdd_nodeset ) : hds = [ ] hdws = [ ] for hdd_node in hdd_nodeset : hds . append ( float ( hdd_node . attrib [ "depth" ] ) ) hdws . append ( float ( hdd_node . attrib [ "probability" ] ) ) hds = expand_src_param ( hds , HDEPTH_PARAMS ) hdsw = expand_src_param ( hdws , HDW_PARAMS ) else : hds = dict ( [ ( key , None ) for key , _ in HDEPTH_PARAMS ] ) hdsw = dict ( [ ( key , None ) for key , _ in HDW_PARAMS ] ) return hds , hdsw | Extract source hypocentral depths . |
44,322 | def extract_source_planes_strikes_dips ( src ) : if "characteristicFaultSource" not in src . tag : strikes = dict ( [ ( key , None ) for key , _ in PLANES_STRIKES_PARAM ] ) dips = dict ( [ ( key , None ) for key , _ in PLANES_DIPS_PARAM ] ) return strikes , dips tags = get_taglist ( src ) surface_set = src . nodes [ tags . index ( "surface" ) ] strikes = [ ] dips = [ ] num_planes = 0 for surface in surface_set : if "planarSurface" in surface . tag : strikes . append ( float ( surface . attrib [ "strike" ] ) ) dips . append ( float ( surface . attrib [ "dip" ] ) ) num_planes += 1 if num_planes > MAX_PLANES : raise ValueError ( "Number of planes in source %s exceeds maximum " "of %s" % ( str ( num_planes ) , str ( MAX_PLANES ) ) ) if num_planes : strikes = expand_src_param ( strikes , PLANES_STRIKES_PARAM ) dips = expand_src_param ( dips , PLANES_DIPS_PARAM ) else : strikes = dict ( [ ( key , None ) for key , _ in PLANES_STRIKES_PARAM ] ) dips = dict ( [ ( key , None ) for key , _ in PLANES_DIPS_PARAM ] ) return strikes , dips | Extract strike and dip angles for source defined by multiple planes . |
44,323 | def set_params ( w , src ) : params = extract_source_params ( src ) params . update ( extract_geometry_params ( src ) ) mfd_pars , rate_pars = extract_mfd_params ( src ) params . update ( mfd_pars ) params . update ( rate_pars ) strikes , dips , rakes , np_weights = extract_source_nodal_planes ( src ) params . update ( strikes ) params . update ( dips ) params . update ( rakes ) params . update ( np_weights ) hds , hdsw = extract_source_hypocentral_depths ( src ) params . update ( hds ) params . update ( hdsw ) pstrikes , pdips = extract_source_planes_strikes_dips ( src ) params . update ( pstrikes ) params . update ( pdips ) params [ 'sourcetype' ] = striptag ( src . tag ) w . record ( ** params ) | Set source parameters . |
44,324 | def set_area_geometry ( w , src ) : assert "areaSource" in src . tag geometry_node = src . nodes [ get_taglist ( src ) . index ( "areaGeometry" ) ] area_attrs = parse_area_geometry ( geometry_node ) w . poly ( parts = [ area_attrs [ "polygon" ] . tolist ( ) ] ) | Set area polygon as shapefile geometry |
44,325 | def set_point_geometry ( w , src ) : assert "pointSource" in src . tag geometry_node = src . nodes [ get_taglist ( src ) . index ( "pointGeometry" ) ] point_attrs = parse_point_geometry ( geometry_node ) w . point ( point_attrs [ "point" ] [ 0 ] , point_attrs [ "point" ] [ 1 ] ) | Set point location as shapefile geometry . |
44,326 | def set_simple_fault_geometry ( w , src ) : assert "simpleFaultSource" in src . tag geometry_node = src . nodes [ get_taglist ( src ) . index ( "simpleFaultGeometry" ) ] fault_attrs = parse_simple_fault_geometry ( geometry_node ) w . line ( parts = [ fault_attrs [ "trace" ] . tolist ( ) ] ) | Set simple fault trace coordinates as shapefile geometry . |
44,327 | def set_simple_fault_geometry_3D ( w , src ) : assert "simpleFaultSource" in src . tag geometry_node = src . nodes [ get_taglist ( src ) . index ( "simpleFaultGeometry" ) ] fault_attrs = parse_simple_fault_geometry ( geometry_node ) build_polygon_from_fault_attrs ( w , fault_attrs ) | Builds a 3D polygon from a node instance |
44,328 | def appraise_source_model ( self ) : for src in self . sources : src_taglist = get_taglist ( src ) if "areaSource" in src . tag : self . has_area_source = True npd_node = src . nodes [ src_taglist . index ( "nodalPlaneDist" ) ] npd_size = len ( npd_node ) hdd_node = src . nodes [ src_taglist . index ( "hypoDepthDist" ) ] hdd_size = len ( hdd_node ) self . num_np = ( npd_size if npd_size > self . num_np else self . num_np ) self . num_hd = ( hdd_size if hdd_size > self . num_hd else self . num_hd ) elif "pointSource" in src . tag : self . has_point_source = True npd_node = src . nodes [ src_taglist . index ( "nodalPlaneDist" ) ] npd_size = len ( npd_node ) hdd_node = src . nodes [ src_taglist . index ( "hypoDepthDist" ) ] hdd_size = len ( hdd_node ) self . num_np = ( npd_size if npd_size > self . num_np else self . num_np ) self . num_hd = ( hdd_size if hdd_size > self . num_hd else self . num_hd ) elif "simpleFaultSource" in src . tag : self . has_simple_fault_geometry = True elif "complexFaultSource" in src . tag : self . has_complex_fault_geometry = True elif "characteristicFaultSource" in src . tag : surface_node = src . nodes [ src_taglist . index ( "surface" ) ] p_size = 0 for surface in surface_node . nodes : if "simpleFaultGeometry" in surface . tag : self . has_simple_fault_geometry = True elif "complexFaultGeometry" in surface . tag : self . has_complex_fault_geometry = True elif "planarSurface" in surface . tag : self . has_planar_geometry = True p_size += 1 self . num_p = p_size if p_size > self . num_p else self . num_p else : pass if "truncGutenbergRichterMFD" in src_taglist : self . has_mfd_gr = True elif "incrementalMFD" in src_taglist : self . has_mfd_incremental = True mfd_node = src . nodes [ src_taglist . index ( "incrementalMFD" ) ] r_size = len ( mfd_node . nodes [ 0 ] . text ) self . num_r = r_size if r_size > self . num_r else self . num_r else : pass | Identify parameters defined in NRML source model file so that shapefile contains only source model specific fields . |
44,329 | def write ( self , destination , source_model , name = None ) : if os . path . exists ( destination ) : os . remove ( destination ) self . destination = destination if name : source_model . name = name output_source_model = Node ( "sourceModel" , { "name" : name } ) dic = groupby ( source_model . sources , operator . itemgetter ( 'tectonicRegion' ) ) for i , ( trt , srcs ) in enumerate ( dic . items ( ) , 1 ) : output_source_model . append ( Node ( 'sourceGroup' , { 'tectonicRegion' : trt , 'name' : 'group %d' % i } , nodes = srcs ) ) print ( "Exporting Source Model to %s" % self . destination ) with open ( self . destination , "wb" ) as f : nrml . write ( [ output_source_model ] , f , "%s" ) | Exports to NRML |
44,330 | def filter_params ( self , src_mod ) : STRIKE_PARAMS [ src_mod . num_np : ] = [ ] DIP_PARAMS [ src_mod . num_np : ] = [ ] RAKE_PARAMS [ src_mod . num_np : ] = [ ] NPW_PARAMS [ src_mod . num_np : ] = [ ] HDEPTH_PARAMS [ src_mod . num_hd : ] = [ ] HDW_PARAMS [ src_mod . num_hd : ] = [ ] PLANES_STRIKES_PARAM [ src_mod . num_p : ] = [ ] PLANES_DIPS_PARAM [ src_mod . num_p : ] = [ ] RATE_PARAMS [ src_mod . num_r : ] = [ ] if src_mod . has_simple_fault_geometry is False : GEOMETRY_PARAMS . remove ( ( 'dip' , 'dip' , 'f' ) ) if ( src_mod . has_simple_fault_geometry is False and src_mod . has_complex_fault_geometry is False and src_mod . has_planar_geometry is False ) : BASE_PARAMS . remove ( ( 'rake' , 'rake' , 'f' ) ) if ( src_mod . has_simple_fault_geometry is False and src_mod . has_complex_fault_geometry is False and src_mod . has_area_source is False and src_mod . has_point_source is False ) : GEOMETRY_PARAMS [ : ] = [ ] if src_mod . has_mfd_incremental is False : MFD_PARAMS . remove ( ( 'binWidth' , 'bin_width' , 'f' ) ) | Remove params unneeded by source_model |
44,331 | def tostring ( node , indent = 4 , nsmap = None ) : out = io . BytesIO ( ) writer = StreamingXMLWriter ( out , indent , nsmap = nsmap ) writer . serialize ( node ) return out . getvalue ( ) | Convert a node into an XML string by using the StreamingXMLWriter . This is useful for testing purposes . |
44,332 | def parse ( source , remove_comments = True , ** kw ) : return ElementTree . parse ( source , SourceLineParser ( ) , ** kw ) | Thin wrapper around ElementTree . parse |
44,333 | def iterparse ( source , events = ( 'end' , ) , remove_comments = True , ** kw ) : return ElementTree . iterparse ( source , events , SourceLineParser ( ) , ** kw ) | Thin wrapper around ElementTree . iterparse |
44,334 | def _displayattrs ( attrib , expandattrs ) : if not attrib : return '' if expandattrs : alist = [ '%s=%r' % item for item in sorted ( attrib . items ( ) ) ] else : alist = list ( attrib ) return '{%s}' % ', ' . join ( alist ) | Helper function to display the attributes of a Node object in lexicographic order . |
44,335 | def _display ( node , indent , expandattrs , expandvals , output ) : attrs = _displayattrs ( node . attrib , expandattrs ) if node . text is None or not expandvals : val = '' elif isinstance ( node . text , str ) : val = ' %s' % repr ( node . text . strip ( ) ) else : val = ' %s' % repr ( node . text ) output . write ( encode ( indent + striptag ( node . tag ) + attrs + val + '\n' ) ) for sub_node in node : _display ( sub_node , indent + ' ' , expandattrs , expandvals , output ) | Core function to display a Node object |
44,336 | def to_literal ( self ) : if not self . nodes : return ( self . tag , self . attrib , self . text , [ ] ) else : return ( self . tag , self . attrib , self . text , list ( map ( to_literal , self . nodes ) ) ) | Convert the node into a literal Python object |
44,337 | def pprint ( self , stream = None , indent = 1 , width = 80 , depth = None ) : pp . pprint ( to_literal ( self ) , stream , indent , width , depth ) | Pretty print the underlying literal Python object |
44,338 | def read_nodes ( fname , filter_elem , nodefactory = Node , remove_comments = True ) : try : for _ , el in iterparse ( fname , remove_comments = remove_comments ) : if filter_elem ( el ) : yield node_from_elem ( el , nodefactory ) el . clear ( ) except Exception : etype , exc , tb = sys . exc_info ( ) msg = str ( exc ) if not str ( fname ) in msg : msg = '%s in %s' % ( msg , fname ) raise_ ( etype , msg , tb ) | Convert an XML file into a lazy iterator over Node objects satisfying the given specification i . e . a function element - > boolean . |
44,339 | def node_from_xml ( xmlfile , nodefactory = Node ) : root = parse ( xmlfile ) . getroot ( ) return node_from_elem ( root , nodefactory ) | Convert a . xml file into a Node object . |
44,340 | def node_from_ini ( ini_file , nodefactory = Node , root_name = 'ini' ) : fileobj = open ( ini_file ) if isinstance ( ini_file , str ) else ini_file cfp = configparser . RawConfigParser ( ) cfp . read_file ( fileobj ) root = nodefactory ( root_name ) sections = cfp . sections ( ) for section in sections : params = dict ( cfp . items ( section ) ) root . append ( Node ( section , params ) ) return root | Convert a . ini file into a Node object . |
44,341 | def node_to_ini ( node , output = sys . stdout ) : for subnode in node : output . write ( u'\n[%s]\n' % subnode . tag ) for name , value in sorted ( subnode . attrib . items ( ) ) : output . write ( u'%s=%s\n' % ( name , value ) ) output . flush ( ) | Convert a Node object with the right structure into a . ini file . |
44,342 | def node_copy ( node , nodefactory = Node ) : return nodefactory ( node . tag , node . attrib . copy ( ) , node . text , [ node_copy ( n , nodefactory ) for n in node ] ) | Make a deep copy of the node |
44,343 | def context ( fname , node ) : try : yield node except Exception : etype , exc , tb = sys . exc_info ( ) msg = 'node %s: %s, line %s of %s' % ( striptag ( node . tag ) , exc , getattr ( node , 'lineno' , '?' ) , fname ) raise_ ( etype , msg , tb ) | Context manager managing exceptions and adding line number of the current node and name of the current file to the error message . |
44,344 | def shorten ( self , tag ) : if tag . startswith ( '{' ) : ns , _tag = tag . rsplit ( '}' ) tag = self . nsmap . get ( ns [ 1 : ] , '' ) + _tag return tag | Get the short representation of a fully qualified tag |
44,345 | def _write ( self , text ) : spaces = ' ' * ( self . indent * self . indentlevel ) t = spaces + text . strip ( ) + '\n' if hasattr ( t , 'encode' ) : t = t . encode ( self . encoding , 'xmlcharrefreplace' ) self . stream . write ( t ) | Write text by respecting the current indentlevel |
44,346 | def start_tag ( self , name , attrs = None ) : if not attrs : self . _write ( '<%s>' % name ) else : self . _write ( '<' + name ) for ( name , value ) in sorted ( attrs . items ( ) ) : self . _write ( ' %s=%s' % ( name , quoteattr ( scientificformat ( value ) ) ) ) self . _write ( '>' ) self . indentlevel += 1 | Open an XML tag |
44,347 | def getnodes ( self , name ) : "Return the direct subnodes with name 'name'" for node in self . nodes : if striptag ( node . tag ) == name : yield node | Return the direct subnodes with name name |
44,348 | def append ( self , node ) : "Append a new subnode" if not isinstance ( node , self . __class__ ) : raise TypeError ( 'Expected Node instance, got %r' % node ) self . nodes . append ( node ) | Append a new subnode |
44,349 | def parse_bytes ( self , bytestr , isfinal = True ) : with self . _context ( ) : self . filename = None self . p . Parse ( bytestr , isfinal ) return self . _root | Parse a byte string . If the string is very large split it in chunks and parse each chunk with isfinal = False then parse an empty chunk with isfinal = True . |
44,350 | def parse_file ( self , file_or_fname ) : with self . _context ( ) : if hasattr ( file_or_fname , 'read' ) : self . filename = getattr ( file_or_fname , 'name' , file_or_fname . __class__ . __name__ ) self . p . ParseFile ( file_or_fname ) else : self . filename = file_or_fname with open ( file_or_fname , 'rb' ) as f : self . p . ParseFile ( f ) return self . _root | Parse a file or a filename |
44,351 | def _get_magnitudes_from_spacing ( self , magnitudes , delta_m ) : min_mag = np . min ( magnitudes ) max_mag = np . max ( magnitudes ) if ( max_mag - min_mag ) < delta_m : raise ValueError ( 'Bin width greater than magnitude range!' ) mag_bins = np . arange ( np . floor ( min_mag ) , np . ceil ( max_mag ) , delta_m ) is_mag = np . logical_and ( mag_bins - max_mag < delta_m , min_mag - mag_bins < delta_m ) mag_bins = mag_bins [ is_mag ] return mag_bins | If a single magnitude spacing is input then create the bins |
44,352 | def _merge_data ( dat1 , dat2 ) : cnt = 0 for key in dat1 : flg1 = len ( dat1 [ key ] ) > 0 flg2 = len ( dat2 [ key ] ) > 0 if flg1 != flg2 : cnt += 1 if cnt : raise Warning ( 'Cannot merge catalogues with different' + ' attributes' ) return None else : for key in dat1 : if isinstance ( dat1 [ key ] , np . ndarray ) : dat1 [ key ] = np . concatenate ( ( dat1 [ key ] , dat2 [ key ] ) , axis = 0 ) elif isinstance ( dat1 [ key ] , list ) : dat1 [ key ] += dat2 [ key ] else : raise ValueError ( 'Unknown type' ) return dat1 | Merge two data dictionaries containing catalogue data |
44,353 | def _get_row_str ( self , i ) : row_data = [ "{:s}" . format ( self . data [ 'eventID' ] [ i ] ) , "{:g}" . format ( self . data [ 'year' ] [ i ] ) , "{:g}" . format ( self . data [ 'month' ] [ i ] ) , "{:g}" . format ( self . data [ 'day' ] [ i ] ) , "{:g}" . format ( self . data [ 'hour' ] [ i ] ) , "{:g}" . format ( self . data [ 'minute' ] [ i ] ) , "{:.1f}" . format ( self . data [ 'second' ] [ i ] ) , "{:.3f}" . format ( self . data [ 'longitude' ] [ i ] ) , "{:.3f}" . format ( self . data [ 'latitude' ] [ i ] ) , "{:.1f}" . format ( self . data [ 'depth' ] [ i ] ) , "{:.1f}" . format ( self . data [ 'magnitude' ] [ i ] ) ] return " " . join ( row_data ) | Returns a string representation of the key information in a row |
44,354 | def load_to_array ( self , keys ) : data = np . empty ( ( len ( self . data [ keys [ 0 ] ] ) , len ( keys ) ) ) for i in range ( 0 , len ( self . data [ keys [ 0 ] ] ) ) : for j , key in enumerate ( keys ) : data [ i , j ] = self . data [ key ] [ i ] return data | This loads the data contained in the catalogue into a numpy array . The method works only for float data |
44,355 | def load_from_array ( self , keys , data_array ) : if len ( keys ) != np . shape ( data_array ) [ 1 ] : raise ValueError ( 'Key list does not match shape of array!' ) for i , key in enumerate ( keys ) : if key in self . INT_ATTRIBUTE_LIST : self . data [ key ] = data_array [ : , i ] . astype ( int ) else : self . data [ key ] = data_array [ : , i ] if key not in self . TOTAL_ATTRIBUTE_LIST : print ( 'Key %s not a recognised catalogue attribute' % key ) self . update_end_year ( ) | This loads the data contained in an array into the catalogue object |
44,356 | def catalogue_mt_filter ( self , mt_table , flag = None ) : if flag is None : flag = np . ones ( self . get_number_events ( ) , dtype = bool ) for comp_val in mt_table : id0 = np . logical_and ( self . data [ 'year' ] . astype ( float ) < comp_val [ 0 ] , self . data [ 'magnitude' ] < comp_val [ 1 ] ) print ( id0 ) flag [ id0 ] = False if not np . all ( flag ) : self . purge_catalogue ( flag ) | Filter the catalogue using a magnitude - time table . The table has two columns and n - rows . |
44,357 | def get_bounding_box ( self ) : return ( np . min ( self . data [ "longitude" ] ) , np . max ( self . data [ "longitude" ] ) , np . min ( self . data [ "latitude" ] ) , np . max ( self . data [ "latitude" ] ) ) | Returns the bounding box of the catalogue |
44,358 | def get_decimal_time ( self ) : return decimal_time ( self . data [ 'year' ] , self . data [ 'month' ] , self . data [ 'day' ] , self . data [ 'hour' ] , self . data [ 'minute' ] , self . data [ 'second' ] ) | Returns the time of the catalogue as a decimal |
44,359 | def sort_catalogue_chronologically ( self ) : dec_time = self . get_decimal_time ( ) idx = np . argsort ( dec_time ) if np . all ( ( idx [ 1 : ] - idx [ : - 1 ] ) > 0. ) : return self . select_catalogue_events ( idx ) | Sorts the catalogue into chronological order |
44,360 | def purge_catalogue ( self , flag_vector ) : id0 = np . where ( flag_vector ) [ 0 ] self . select_catalogue_events ( id0 ) self . get_number_events ( ) | Purges present catalogue with invalid events defined by flag_vector |
44,361 | def select_catalogue_events ( self , id0 ) : for key in self . data : if isinstance ( self . data [ key ] , np . ndarray ) and len ( self . data [ key ] ) > 0 : self . data [ key ] = self . data [ key ] [ id0 ] elif isinstance ( self . data [ key ] , list ) and len ( self . data [ key ] ) > 0 : self . data [ key ] = [ self . data [ key ] [ iloc ] for iloc in id0 ] else : continue | Orders the events in the catalogue according to an indexing vector . |
44,362 | def get_depth_distribution ( self , depth_bins , normalisation = False , bootstrap = None ) : if len ( self . data [ 'depth' ] ) == 0 : raise ValueError ( 'Depths missing in catalogue' ) if len ( self . data [ 'depthError' ] ) == 0 : self . data [ 'depthError' ] = np . zeros ( self . get_number_events ( ) , dtype = float ) return bootstrap_histogram_1D ( self . data [ 'depth' ] , depth_bins , self . data [ 'depthError' ] , normalisation = normalisation , number_bootstraps = bootstrap , boundaries = ( 0. , None ) ) | Gets the depth distribution of the earthquake catalogue to return a single histogram . Depths may be normalised . If uncertainties are found in the catalogue the distribution may be bootstrap sampled |
44,363 | def get_depth_pmf ( self , depth_bins , default_depth = 5.0 , bootstrap = None ) : if len ( self . data [ 'depth' ] ) == 0 : return PMF ( [ ( 1.0 , default_depth ) ] ) depth_hist = self . get_depth_distribution ( depth_bins , normalisation = True , bootstrap = bootstrap ) depth_hist = np . around ( depth_hist , 3 ) while depth_hist . sum ( ) - 1.0 : depth_hist [ - 1 ] -= depth_hist . sum ( ) - 1.0 depth_hist = np . around ( depth_hist , 3 ) pmf_list = [ ] for iloc , prob in enumerate ( depth_hist ) : pmf_list . append ( ( prob , ( depth_bins [ iloc ] + depth_bins [ iloc + 1 ] ) / 2.0 ) ) return PMF ( pmf_list ) | Returns the depth distribution of the catalogue as a probability mass function |
44,364 | def get_magnitude_depth_distribution ( self , magnitude_bins , depth_bins , normalisation = False , bootstrap = None ) : if len ( self . data [ 'depth' ] ) == 0 : raise ValueError ( 'Depths missing in catalogue' ) if len ( self . data [ 'depthError' ] ) == 0 : self . data [ 'depthError' ] = np . zeros ( self . get_number_events ( ) , dtype = float ) if len ( self . data [ 'sigmaMagnitude' ] ) == 0 : self . data [ 'sigmaMagnitude' ] = np . zeros ( self . get_number_events ( ) , dtype = float ) return bootstrap_histogram_2D ( self . data [ 'magnitude' ] , self . data [ 'depth' ] , magnitude_bins , depth_bins , boundaries = [ ( 0. , None ) , ( None , None ) ] , xsigma = self . data [ 'sigmaMagnitude' ] , ysigma = self . data [ 'depthError' ] , normalisation = normalisation , number_bootstraps = bootstrap ) | Returns a 2 - D magnitude - depth histogram for the catalogue |
44,365 | def get_magnitude_time_distribution ( self , magnitude_bins , time_bins , normalisation = False , bootstrap = None ) : return bootstrap_histogram_2D ( self . get_decimal_time ( ) , self . data [ 'magnitude' ] , time_bins , magnitude_bins , xsigma = np . zeros ( self . get_number_events ( ) ) , ysigma = self . data [ 'sigmaMagnitude' ] , normalisation = normalisation , number_bootstraps = bootstrap ) | Returns a 2 - D histogram indicating the number of earthquakes in a set of time - magnitude bins . Time is in decimal years! |
44,366 | def concatenate ( self , catalogue ) : atts = getattr ( self , 'data' ) attn = getattr ( catalogue , 'data' ) data = _merge_data ( atts , attn ) if data is not None : setattr ( self , 'data' , data ) for attrib in vars ( self ) : atts = getattr ( self , attrib ) attn = getattr ( catalogue , attrib ) if attrib == 'end_year' : setattr ( self , attrib , max ( atts , attn ) ) elif attrib == 'start_year' : setattr ( self , attrib , min ( atts , attn ) ) elif attrib == 'data' : pass elif attrib == 'number_earthquakes' : setattr ( self , attrib , atts + attn ) elif attrib == 'processes' : if atts != attn : raise ValueError ( 'The catalogues cannot be merged' + ' since they have' + ' a different processing history' ) else : raise ValueError ( 'unknown attribute: %s' % attrib ) self . sort_catalogue_chronologically ( ) | This method attaches one catalogue to the current one |
44,367 | def expose_outputs ( dstore , owner = getpass . getuser ( ) , status = 'complete' ) : oq = dstore [ 'oqparam' ] exportable = set ( ekey [ 0 ] for ekey in export . export ) calcmode = oq . calculation_mode dskeys = set ( dstore ) & exportable dskeys . add ( 'fullreport' ) rlzs = dstore [ 'csm_info' ] . rlzs if len ( rlzs ) > 1 : dskeys . add ( 'realizations' ) if len ( dstore [ 'csm_info/sg_data' ] ) > 1 : dskeys . add ( 'sourcegroups' ) hdf5 = dstore . hdf5 if 'hcurves-stats' in hdf5 or 'hcurves-rlzs' in hdf5 : if oq . hazard_stats ( ) or oq . individual_curves or len ( rlzs ) == 1 : dskeys . add ( 'hcurves' ) if oq . uniform_hazard_spectra : dskeys . add ( 'uhs' ) if oq . hazard_maps : dskeys . add ( 'hmaps' ) if 'avg_losses-stats' in dstore or ( 'avg_losses-rlzs' in dstore and len ( rlzs ) ) : dskeys . add ( 'avg_losses-stats' ) if 'curves-rlzs' in dstore and len ( rlzs ) == 1 : dskeys . add ( 'loss_curves-rlzs' ) if 'curves-stats' in dstore and len ( rlzs ) > 1 : dskeys . add ( 'loss_curves-stats' ) if oq . conditional_loss_poes : if 'loss_curves-stats' in dstore : dskeys . add ( 'loss_maps-stats' ) if 'all_loss_ratios' in dskeys : dskeys . remove ( 'all_loss_ratios' ) if 'ruptures' in dskeys and 'scenario' in calcmode : exportable . remove ( 'ruptures' ) if 'rup_loss_table' in dskeys : dskeys . remove ( 'rup_loss_table' ) if 'hmaps' in dskeys and not oq . hazard_maps : dskeys . remove ( 'hmaps' ) if logs . dbcmd ( 'get_job' , dstore . calc_id ) is None : logs . dbcmd ( 'import_job' , dstore . calc_id , oq . calculation_mode , oq . description + ' [parent]' , owner , status , oq . hazard_calculation_id , dstore . datadir ) keysize = [ ] for key in sorted ( dskeys & exportable ) : try : size_mb = dstore . get_attr ( key , 'nbytes' ) / MB except ( KeyError , AttributeError ) : size_mb = None keysize . append ( ( key , size_mb ) ) ds_size = os . path . getsize ( dstore . filename ) / MB logs . dbcmd ( 'create_outputs' , dstore . calc_id , keysize , ds_size ) | Build a correspondence between the outputs in the datastore and the ones in the database . |
44,368 | def raiseMasterKilled ( signum , _stack ) : if OQ_DISTRIBUTE . startswith ( 'celery' ) : signal . signal ( signal . SIGINT , inhibitSigInt ) msg = 'Received a signal %d' % signum if signum in ( signal . SIGTERM , signal . SIGINT ) : msg = 'The openquake master process was killed manually' if hasattr ( signal , 'SIGHUP' ) : if signum == signal . SIGHUP : if os . getppid ( ) == _PPID : return else : msg = 'The openquake master lost its controlling terminal' raise MasterKilled ( msg ) | When a SIGTERM is received raise the MasterKilled exception with an appropriate error message . |
44,369 | def job_from_file ( job_ini , job_id , username , ** kw ) : hc_id = kw . get ( 'hazard_calculation_id' ) try : oq = readinput . get_oqparam ( job_ini , hc_id = hc_id ) except Exception : logs . dbcmd ( 'finish' , job_id , 'failed' ) raise if 'calculation_mode' in kw : oq . calculation_mode = kw . pop ( 'calculation_mode' ) if 'description' in kw : oq . description = kw . pop ( 'description' ) if 'exposure_file' in kw : fnames = kw . pop ( 'exposure_file' ) . split ( ) if fnames : oq . inputs [ 'exposure' ] = fnames elif 'exposure' in oq . inputs : del oq . inputs [ 'exposure' ] logs . dbcmd ( 'update_job' , job_id , dict ( calculation_mode = oq . calculation_mode , description = oq . description , user_name = username , hazard_calculation_id = hc_id ) ) return oq | Create a full job profile from a job config file . |
44,370 | def check_obsolete_version ( calculation_mode = 'WebUI' ) : if os . environ . get ( 'JENKINS_URL' ) or os . environ . get ( 'TRAVIS' ) : return headers = { 'User-Agent' : 'OpenQuake Engine %s;%s;%s;%s' % ( __version__ , calculation_mode , platform . platform ( ) , config . distribution . oq_distribute ) } try : req = Request ( OQ_API + '/engine/latest' , headers = headers ) data = urlopen ( req , timeout = 1 ) . read ( ) tag_name = json . loads ( decode ( data ) ) [ 'tag_name' ] current = version_triple ( __version__ ) latest = version_triple ( tag_name ) except Exception : return if current < latest : return ( 'Version %s of the engine is available, but you are ' 'still using version %s' % ( tag_name , __version__ ) ) else : return '' | Check if there is a newer version of the engine . |
44,371 | def encode ( val ) : if isinstance ( val , ( list , tuple ) ) : return [ encode ( v ) for v in val ] elif isinstance ( val , str ) : return val . encode ( 'utf-8' ) else : return val | Encode a string assuming the encoding is UTF - 8 . |
44,372 | def raise_ ( tp , value = None , tb = None ) : if value is not None and isinstance ( tp , Exception ) : raise TypeError ( "instance exception may not have a separate value" ) if value is not None : exc = tp ( value ) else : exc = tp if exc . __traceback__ is not tb : raise exc . with_traceback ( tb ) raise exc | A function that matches the Python 2 . x raise statement . This allows re - raising exceptions with the cls value and traceback on Python 2 and 3 . |
44,373 | def plot_pyro ( calc_id = - 1 ) : import matplotlib . pyplot as p dstore = util . read ( calc_id ) sitecol = dstore [ 'sitecol' ] asset_risk = dstore [ 'asset_risk' ] . value pyro , = numpy . where ( dstore [ 'multi_peril' ] [ 'PYRO' ] == 1 ) lons = sitecol . lons [ pyro ] lats = sitecol . lats [ pyro ] p . scatter ( lons , lats , marker = 'o' , color = 'red' ) building_pyro , = numpy . where ( asset_risk [ 'building-PYRO' ] == 1 ) lons = sitecol . lons [ building_pyro ] lats = sitecol . lats [ building_pyro ] p . scatter ( lons , lats , marker = '.' , color = 'green' ) p . show ( ) | Plot the pyroclastic cloud and the assets |
44,374 | def get_resampled_coordinates ( lons , lats ) : num_coords = len ( lons ) assert num_coords == len ( lats ) lons1 = numpy . array ( lons ) lats1 = numpy . array ( lats ) lons2 = numpy . concatenate ( ( lons1 [ 1 : ] , lons1 [ : 1 ] ) ) lats2 = numpy . concatenate ( ( lats1 [ 1 : ] , lats1 [ : 1 ] ) ) distances = geodetic . geodetic_distance ( lons1 , lats1 , lons2 , lats2 ) resampled_lons = [ lons [ 0 ] ] resampled_lats = [ lats [ 0 ] ] for i in range ( num_coords ) : next_point = ( i + 1 ) % num_coords lon1 , lat1 = lons [ i ] , lats [ i ] lon2 , lat2 = lons [ next_point ] , lats [ next_point ] distance = distances [ i ] num_points = int ( distance / UPSAMPLING_STEP_KM ) + 1 if num_points >= 2 : new_lons , new_lats , _ = geodetic . npoints_between ( lon1 , lat1 , 0 , lon2 , lat2 , 0 , num_points ) resampled_lons . extend ( new_lons [ 1 : ] ) resampled_lats . extend ( new_lats [ 1 : ] ) else : resampled_lons . append ( lon2 ) resampled_lats . append ( lat2 ) return numpy . array ( resampled_lons [ : - 1 ] ) , numpy . array ( resampled_lats [ : - 1 ] ) | Resample polygon line segments and return the coordinates of the new vertices . This limits distortions when projecting a polygon onto a spherical surface . |
44,375 | def get_middle_point ( self ) : lons = self . mesh . lons . squeeze ( ) lats = self . mesh . lats . squeeze ( ) depths = self . mesh . depths . squeeze ( ) lon_bar = lons . mean ( ) lat_bar = lats . mean ( ) idx = np . argmin ( ( lons - lon_bar ) ** 2 + ( lats - lat_bar ) ** 2 ) return Point ( lons [ idx ] , lats [ idx ] , depths [ idx ] ) | Compute coordinates of surface middle point . |
44,376 | def modify ( self , modification , parameters ) : if modification not in self . MODIFICATIONS : raise ValueError ( 'Modification %s is not supported by %s' % ( modification , type ( self ) . __name__ ) ) meth = getattr ( self , 'modify_%s' % modification ) meth ( ** parameters ) self . check_constraints ( ) | Apply a single modification to the MFD parameters . |
44,377 | def _get_stddevs ( self , rup , arias , stddev_types , sites ) : stddevs = [ ] if rup . mag < 4.7 : tau = 0.611 elif rup . mag > 7.6 : tau = 0.475 else : tau = 0.611 - 0.047 * ( rup . mag - 4.7 ) sigma1 , sigma2 = self . _get_intra_event_sigmas ( sites ) sigma = np . copy ( sigma1 ) idx = arias >= 0.125 sigma [ idx ] = sigma2 [ idx ] idx = np . logical_and ( arias > 0.013 , arias < 0.125 ) sigma [ idx ] = sigma1 [ idx ] - 0.106 * ( np . log ( arias [ idx ] ) - np . log ( 0.0132 ) ) sigma_total = np . sqrt ( tau ** 2. + sigma ** 2. ) for stddev_type in stddev_types : assert stddev_type in self . DEFINED_FOR_STANDARD_DEVIATION_TYPES if stddev_type == const . StdDev . TOTAL : stddevs . append ( sigma_total ) elif stddev_type == const . StdDev . INTRA_EVENT : stddevs . append ( sigma ) elif stddev_type == const . StdDev . INTER_EVENT : stddevs . append ( tau * np . ones_like ( sites . vs30 ) ) return stddevs | Return standard deviations as defined in table 1 p . 200 . |
44,378 | def _get_intra_event_sigmas ( self , sites ) : sigma1 = 1.18 * np . ones_like ( sites . vs30 ) sigma2 = 0.94 * np . ones_like ( sites . vs30 ) idx1 = np . logical_and ( sites . vs30 >= 360.0 , sites . vs30 < 760.0 ) idx2 = sites . vs30 < 360.0 sigma1 [ idx1 ] = 1.17 sigma2 [ idx1 ] = 0.93 sigma1 [ idx2 ] = 0.96 sigma2 [ idx2 ] = 0.73 return sigma1 , sigma2 | The intra - event term is nonlinear and dependent on both the site class and the expected ground motion . In this case the sigma coefficients are determined from the site class as described below Eq . 14 |
44,379 | def _get_pga_on_rock ( self , C , rup , dists ) : return np . exp ( self . _get_magnitude_scaling_term ( C , rup ) + self . _get_path_scaling ( C , dists , rup . mag ) ) | Returns the median PGA on rock which is a sum of the magnitude and distance scaling |
44,380 | def modify ( self , modification , parameters ) : for src in self : src . modify ( modification , parameters ) | Apply a modification to the underlying point sources with the same parameters for all sources |
44,381 | def _compute_mean ( self , C , mag , ztor , rrup ) : gc0 = 0.2418 ci = 0.3846 gch = 0.00607 g4 = 1.7818 ge = 0.554 gm = 1.414 mean = ( gc0 + ci + ztor * gch + C [ 'gc1' ] + gm * mag + C [ 'gc2' ] * ( 10 - mag ) ** 3 + C [ 'gc3' ] * np . log ( rrup + g4 * np . exp ( ge * mag ) ) ) return mean | Compute mean value as in subroutine getGeom in hazgridXnga2 . f |
44,382 | def abort ( job_id ) : job = logs . dbcmd ( 'get_job' , job_id ) if job is None : print ( 'There is no job %d' % job_id ) return elif job . status not in ( 'executing' , 'running' ) : print ( 'Job %d is %s' % ( job . id , job . status ) ) return name = 'oq-job-%d' % job . id for p in psutil . process_iter ( ) : if p . name ( ) == name : try : os . kill ( p . pid , signal . SIGTERM ) logs . dbcmd ( 'set_status' , job . id , 'aborted' ) print ( 'Job %d aborted' % job . id ) except Exception as exc : print ( exc ) break else : logs . dbcmd ( 'set_status' , job . id , 'failed' ) print ( 'Unable to find a process for job %d,' ' setting it as failed' % job . id ) | Abort the given job |
44,383 | def compose ( scripts , name = 'main' , description = None , prog = None , version = None ) : assert len ( scripts ) >= 1 , scripts parentparser = argparse . ArgumentParser ( description = description , add_help = False ) parentparser . add_argument ( '--version' , '-v' , action = 'version' , version = version ) subparsers = parentparser . add_subparsers ( help = 'available subcommands; use %s help <subcmd>' % prog , prog = prog ) def gethelp ( cmd = None ) : if cmd is None : print ( parentparser . format_help ( ) ) return subp = subparsers . _name_parser_map . get ( cmd ) if subp is None : print ( 'No help for unknown command %r' % cmd ) else : print ( subp . format_help ( ) ) help_script = Script ( gethelp , 'help' , help = False ) progname = '%s ' % prog if prog else '' help_script . arg ( 'cmd' , progname + 'subcommand' ) for s in list ( scripts ) + [ help_script ] : subp = subparsers . add_parser ( s . name , description = s . description ) for args , kw in s . all_arguments : subp . add_argument ( * args , ** kw ) subp . set_defaults ( _func = s . func ) def main ( ** kw ) : try : func = kw . pop ( '_func' ) except KeyError : parentparser . print_usage ( ) else : return func ( ** kw ) main . __name__ = name return Script ( main , name , parentparser ) | Collects together different scripts and builds a single script dispatching to the subparsers depending on the first argument i . e . the name of the subparser to invoke . |
44,384 | def _add ( self , name , * args , ** kw ) : argname = list ( self . argdict ) [ self . _argno ] if argname != name : raise NameError ( 'Setting argument %s, but it should be %s' % ( name , argname ) ) self . _group . add_argument ( * args , ** kw ) self . all_arguments . append ( ( args , kw ) ) self . names . append ( name ) self . _argno += 1 | Add an argument to the underlying parser and grow the list . all_arguments and the set . names |
44,385 | def arg ( self , name , help , type = None , choices = None , metavar = None , nargs = None ) : kw = dict ( help = help , type = type , choices = choices , metavar = metavar , nargs = nargs ) default = self . argdict [ name ] if default is not NODEFAULT : kw [ 'nargs' ] = nargs or '?' kw [ 'default' ] = default kw [ 'help' ] = kw [ 'help' ] + ' [default: %s]' % repr ( default ) self . _add ( name , name , ** kw ) | Describe a positional argument |
44,386 | def opt ( self , name , help , abbrev = None , type = None , choices = None , metavar = None , nargs = None ) : kw = dict ( help = help , type = type , choices = choices , metavar = metavar , nargs = nargs ) default = self . argdict [ name ] if default is not NODEFAULT : kw [ 'default' ] = default kw [ 'metavar' ] = metavar or str_choices ( choices ) or str ( default ) abbrev = abbrev or '-' + name [ 0 ] abbrevs = set ( args [ 0 ] for args , kw in self . all_arguments ) longname = '--' + name . replace ( '_' , '-' ) if abbrev == '-h' or abbrev in abbrevs : self . _add ( name , longname , ** kw ) else : self . _add ( name , abbrev , longname , ** kw ) | Describe an option |
44,387 | def flg ( self , name , help , abbrev = None ) : abbrev = abbrev or '-' + name [ 0 ] longname = '--' + name . replace ( '_' , '-' ) self . _add ( name , abbrev , longname , action = 'store_true' , help = help ) | Describe a flag |
44,388 | def check_arguments ( self ) : for name , default in self . argdict . items ( ) : if name not in self . names and default is NODEFAULT : raise NameError ( 'Missing argparse specification for %r' % name ) | Make sure all arguments have a specification |
44,389 | def callfunc ( self , argv = None ) : if not self . checked : self . check_arguments ( ) self . checked = True namespace = self . parentparser . parse_args ( argv or sys . argv [ 1 : ] ) return self . func ( ** vars ( namespace ) ) | Parse the argv list and extract a dictionary of arguments which is then passed to the function underlying the script . |
44,390 | def incremental_value ( self , slip_moment , mmax , mag_value , bbar , dbar ) : delta_m = mmax - mag_value dirac_term = np . zeros_like ( mag_value ) dirac_term [ np . fabs ( delta_m ) < 1.0E-12 ] = 1.0 a_1 = self . _get_a1 ( bbar , dbar , slip_moment , mmax ) return a_1 * ( bbar * np . exp ( bbar * delta_m ) * ( delta_m > 0.0 ) ) + a_1 * dirac_term | Returns the incremental rate of earthquakes with M = mag_value |
44,391 | def _get_a2 ( bbar , dbar , slip_moment , mmax ) : return ( ( dbar - bbar ) / bbar ) * ( slip_moment / _scale_moment ( mmax ) ) | Returns the A2 value defined in II . 4 of Table 2 |
44,392 | def incremental_value ( self , slip_moment , mmax , mag_value , bbar , dbar ) : delta_m = mmax - mag_value a_3 = self . _get_a3 ( bbar , dbar , slip_moment , mmax ) return a_3 * bbar * ( np . exp ( bbar * delta_m ) - 1.0 ) * ( delta_m > 0.0 ) | Returns the incremental rate with Mmax = Mag_value |
44,393 | def get_mmax ( self , mfd_conf , msr , rake , area ) : if mfd_conf [ 'Maximum_Magnitude' ] : self . mmax = mfd_conf [ 'Maximum_Magnitude' ] else : self . mmax = msr . get_median_mag ( area , rake ) if ( 'Maximum_Magnitude_Uncertainty' in mfd_conf and mfd_conf [ 'Maximum_Magnitude_Uncertainty' ] ) : self . mmax_sigma = mfd_conf [ 'Maximum_Magnitude_Uncertainty' ] else : self . mmax_sigma = msr . get_std_dev_mag ( rake ) | Gets the mmax for the fault - reading directly from the config file or using the msr otherwise |
44,394 | def _get_magnitude_term ( self , C , mag ) : lny = C [ 'C1' ] + ( C [ 'C3' ] * ( ( 8.5 - mag ) ** 2. ) ) if mag > 6.3 : return lny + ( - C [ 'H' ] * C [ 'C5' ] ) * ( mag - 6.3 ) else : return lny + C [ 'C2' ] * ( mag - 6.3 ) | Returns the magnitude scaling term . |
44,395 | def _get_style_of_faulting_term ( self , C , rake ) : f_n , f_r = self . _get_fault_type_dummy_variables ( rake ) return C [ 'C6' ] * f_n + C [ 'C7' ] * f_r | Returns the style of faulting factor |
44,396 | def _get_stddevs ( self , C , stddev_types , nsites ) : stddevs = [ ] for stddev_type in stddev_types : assert stddev_type in self . DEFINED_FOR_STANDARD_DEVIATION_TYPES if stddev_type == const . StdDev . TOTAL : stddevs . append ( C [ 'sigma' ] + np . zeros ( nsites , dtype = float ) ) return stddevs | Compute total standard deviation see table 4 . 2 page 50 . |
44,397 | def lon_lat_bins ( bb , coord_bin_width ) : west , south , east , north = bb west = numpy . floor ( west / coord_bin_width ) * coord_bin_width east = numpy . ceil ( east / coord_bin_width ) * coord_bin_width lon_extent = get_longitudinal_extent ( west , east ) lon_bins , _ , _ = npoints_between ( west , 0 , 0 , east , 0 , 0 , numpy . round ( lon_extent / coord_bin_width + 1 ) ) lat_bins = coord_bin_width * numpy . arange ( int ( numpy . floor ( south / coord_bin_width ) ) , int ( numpy . ceil ( north / coord_bin_width ) + 1 ) ) return lon_bins , lat_bins | Define bin edges for disaggregation histograms . |
44,398 | def _digitize_lons ( lons , lon_bins ) : if cross_idl ( lon_bins [ 0 ] , lon_bins [ - 1 ] ) : idx = numpy . zeros_like ( lons , dtype = numpy . int ) for i_lon in range ( len ( lon_bins ) - 1 ) : extents = get_longitudinal_extent ( lons , lon_bins [ i_lon + 1 ] ) lon_idx = extents > 0 if i_lon != 0 : extents = get_longitudinal_extent ( lon_bins [ i_lon ] , lons ) lon_idx &= extents >= 0 idx [ lon_idx ] = i_lon return numpy . array ( idx ) else : return numpy . digitize ( lons , lon_bins ) - 1 | Return indices of the bins to which each value in lons belongs . Takes into account the case in which longitude values cross the international date line . |
44,399 | def mag_pmf ( matrix ) : nmags , ndists , nlons , nlats , neps = matrix . shape mag_pmf = numpy . zeros ( nmags ) for i in range ( nmags ) : mag_pmf [ i ] = numpy . prod ( [ 1. - matrix [ i , j , k , l , m ] for j in range ( ndists ) for k in range ( nlons ) for l in range ( nlats ) for m in range ( neps ) ] ) return 1. - mag_pmf | Fold full disaggregation matrix to magnitude PMF . |
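The `question` column in the rows above stores each function as a space-tokenized stream, so every bracket, comma, and dot is padded with spaces. For eyeballing samples it helps to collapse that stream back into something closer to normal Python. The sketch below is a heuristic helper, not part of any dataset tooling: the `detokenize` function, its regexes, and the hardcoded sample row (copied from idx 44,342 above) are all illustrative assumptions, and the result loses the original line breaks and indentation.

```python
import re

def detokenize(source: str) -> str:
    # Heuristic only: strip the padding the tokenizer inserted around
    # punctuation; does NOT restore line breaks or indentation.
    out = re.sub(r'\s+([,:)\]}])', r'\1', source)     # " )" -> ")", " ," -> ","
    out = re.sub(r'([(\[{])\s+', r'\1', out)          # "( x" -> "(x"
    out = re.sub(r'\s+\.\s+', '.', out)               # ") . getroot" -> ").getroot"
    out = re.sub(r'(\w)\s+\(', r'\1(', out)           # "parse (" -> "parse("
    return out

# Hypothetical row following the (idx, question, target) schema above
row = {
    "idx": 44342,
    "question": ("def node_from_xml ( xmlfile , nodefactory = Node ) : "
                 "root = parse ( xmlfile ) . getroot ( ) "
                 "return node_from_elem ( root , nodefactory )"),
    "target": "Convert a . xml file into a Node object .",
}
print(detokenize(row["question"]))
# -> def node_from_xml(xmlfile, nodefactory = Node): root = parse(xmlfile).getroot() return node_from_elem(root, nodefactory)
print("docstring:", row["target"])
```

The rewriting is intentionally conservative: spaces around `=` and other operators are left alone, since removing them blindly would also mangle keyword defaults and comparisons inside strings.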