idx int64 0 63k | question stringlengths 61 4.03k | target stringlengths 6 1.23k |
|---|---|---|
25,100 | def check_config ( config ) : essential_keys = [ 'number_earthquakes' ] for key in essential_keys : if key not in config : raise ValueError ( 'For Kijko Nonparametric Gaussian the key %s ' 'needs to be set in the configuation' % key ) if config . get ( 'tolerance' , 0.0 ) <= 0.0 : config [ 'tolerance' ] = 0.05 if config . get ( 'maximum_iterations' , 0 ) < 1 : config [ 'maximum_iterations' ] = 100 if config . get ( 'number_samples' , 0 ) < 2 : config [ 'number_samples' ] = 51 return config | Check config file inputs and overwrite bad values with the defaults |
25,101 | def _get_exponential_spaced_values ( mmin , mmax , number_samples ) : lhs = np . exp ( mmin ) + np . arange ( 0. , number_samples - 1. , 1. ) * ( ( np . exp ( mmax ) - np . exp ( mmin ) ) / ( number_samples - 1. ) ) magval = np . hstack ( [ lhs , np . exp ( mmax ) ] ) return np . log ( magval ) | Function to return a set of exponentially spaced values between mmin and mmax |
25,102 | def dbcmd ( action , * args ) : global sock if sock is None : sock = zeromq . Socket ( 'tcp://%s:%s' % ( config . dbserver . host , DBSERVER_PORT ) , zeromq . zmq . REQ , 'connect' ) . __enter__ ( ) res = sock . send ( ( action , ) + args ) if isinstance ( res , parallel . Result ) : return res . get ( ) return res | A dispatcher to the database server . |
25,103 | def _update_log_record ( self , record ) : if not hasattr ( record , 'hostname' ) : record . hostname = '-' if not hasattr ( record , 'job_id' ) : record . job_id = self . job_id | Massage a log record before emitting it . Intended to be used by the custom log handlers defined in this module . |
25,104 | def handle ( job_id , log_level = 'info' , log_file = None ) : handlers = [ LogDatabaseHandler ( job_id ) ] if log_file is None : if not any ( h for h in logging . root . handlers if isinstance ( h , logging . StreamHandler ) ) : handlers . append ( LogStreamHandler ( job_id ) ) else : handlers . append ( LogFileHandler ( job_id , log_file ) ) for handler in handlers : logging . root . addHandler ( handler ) init ( job_id , LEVELS . get ( log_level , logging . WARNING ) ) try : yield finally : if ( log_file and log_file != os . devnull and os . path . getsize ( log_file ) == 0 ) : logging . root . warn ( 'The log file %s is empty!?' % log_file ) for handler in handlers : logging . root . removeHandler ( handler ) | Context manager adding and removing log handlers . |
25,105 | def get_median_area ( self , mag , rake ) : length = 10.0 ** ( - 2.57 + 0.62 * mag ) seis_wid = 20.0 if length < seis_wid : return length ** 2. else : return length * seis_wid | The values are a function of magnitude . |
25,106 | def _construct_surface ( lons , lats , upper_depth , lower_depth ) : depths = np . array ( [ np . zeros_like ( lons ) + upper_depth , np . zeros_like ( lats ) + lower_depth ] ) mesh = RectangularMesh ( np . tile ( lons , ( 2 , 1 ) ) , np . tile ( lats , ( 2 , 1 ) ) , depths ) return SimpleFaultSurface ( mesh ) | Utility method that constructs and return a simple fault surface with top edge specified by lons and lats and extending vertically from upper_depth to lower_depth . |
25,107 | def _get_min_distance_to_sub_trench ( lons , lats ) : trench = _construct_surface ( SUB_TRENCH_LONS , SUB_TRENCH_LATS , 0. , 10. ) sites = Mesh ( lons , lats , None ) return np . abs ( trench . get_rx_distance ( sites ) ) | Compute and return minimum distance between subduction trench and points specified by lon and lat |
25,108 | def _get_min_distance_to_volcanic_front ( lons , lats ) : vf = _construct_surface ( VOLCANIC_FRONT_LONS , VOLCANIC_FRONT_LATS , 0. , 10. ) sites = Mesh ( lons , lats , None ) return vf . get_rx_distance ( sites ) | Compute and return minimum distance between volcanic front and points specified by lon and lat . |
25,109 | def _apply_subduction_trench_correction ( mean , x_tr , H , rrup , imt ) : if imt . name == 'PGV' : V1 = 10 ** ( ( - 4.021e-5 * x_tr + 9.905e-3 ) * ( H - 30 ) ) V2 = np . maximum ( 1. , ( 10 ** ( - 0.012 ) ) * ( ( rrup / 300. ) ** 2.064 ) ) corr = V2 if H > 30 : corr *= V1 else : V2 = np . maximum ( 1. , ( 10 ** ( + 0.13 ) ) * ( ( rrup / 300. ) ** 3.2 ) ) corr = V2 if H > 30 : V1 = 10 ** ( ( - 8.1e-5 * x_tr + 2.0e-2 ) * ( H - 30 ) ) corr *= V1 return np . log ( np . exp ( mean ) * corr ) | Implement equation for subduction trench correction as described in equation 3 . 5 . 2 - 1 page 3 - 148 of Technical Reports on National Seismic Hazard Maps for Japan |
25,110 | def _apply_volcanic_front_correction ( mean , x_vf , H , imt ) : V1 = np . zeros_like ( x_vf ) if imt . name == 'PGV' : idx = x_vf <= 75 V1 [ idx ] = 4.28e-5 * x_vf [ idx ] * ( H - 30 ) idx = x_vf > 75 V1 [ idx ] = 3.21e-3 * ( H - 30 ) V1 = 10 ** V1 else : idx = x_vf <= 75 V1 [ idx ] = 7.06e-5 * x_vf [ idx ] * ( H - 30 ) idx = x_vf > 75 V1 [ idx ] = 5.30e-3 * ( H - 30 ) V1 = 10 ** V1 return np . log ( np . exp ( mean ) * V1 ) | Implement equation for volcanic front correction as described in equation 3 . 5 . 2 . - 2 page 3 - 149 of Technical Reports on National Seismic Hazard Maps for Japan |
25,111 | def get_mean_and_stddevs ( self , sites , rup , dists , imt , stddev_types ) : mean = self . _get_mean ( imt , rup . mag , rup . hypo_depth , dists . rrup , d = 0 ) stddevs = self . _get_stddevs ( stddev_types , dists . rrup ) mean = self . _apply_amplification_factor ( mean , sites . vs30 ) return mean , stddevs | Implements equation 3 . 5 . 1 - 1 page 148 for mean value and equation 3 . 5 . 5 - 2 page 151 for total standard deviation . |
25,112 | def _get_mean ( self , imt , mag , hypo_depth , rrup , d ) : mag = min ( mag , 8.3 ) if imt . name == 'PGV' : mean = ( 0.58 * mag + 0.0038 * hypo_depth + d - 1.29 - np . log10 ( rrup + 0.0028 * 10 ** ( 0.5 * mag ) ) - 0.002 * rrup ) else : mean = ( 0.50 * mag + 0.0043 * hypo_depth + d + 0.61 - np . log10 ( rrup + 0.0055 * 10 ** ( 0.5 * mag ) ) - 0.003 * rrup ) mean = np . log10 ( 10 ** ( mean ) / ( g * 100 ) ) return mean | Return mean value as defined in equation 3 . 5 . 1 - 1 page 148 |
25,113 | def _get_stddevs ( self , stddev_types , rrup ) : assert all ( stddev_type in self . DEFINED_FOR_STANDARD_DEVIATION_TYPES for stddev_type in stddev_types ) std = np . zeros_like ( rrup ) std [ rrup <= 20 ] = 0.23 idx = ( rrup > 20 ) & ( rrup <= 30 ) std [ idx ] = 0.23 - 0.03 * np . log10 ( rrup [ idx ] / 20 ) / np . log10 ( 30. / 20. ) std [ rrup > 30 ] = 0.20 std = np . log ( 10 ** std ) return [ std for stddev_type in stddev_types ] | Return standard deviations as defined in equation 3 . 5 . 5 - 2 page 151 |
25,114 | def _get_stddevs ( self , stddev_types , pgv ) : assert all ( stddev_type in self . DEFINED_FOR_STANDARD_DEVIATION_TYPES for stddev_type in stddev_types ) std = np . zeros_like ( pgv ) std [ pgv <= 25 ] = 0.20 idx = ( pgv > 25 ) & ( pgv <= 50 ) std [ idx ] = 0.20 - 0.05 * ( pgv [ idx ] - 25 ) / 25 std [ pgv > 50 ] = 0.15 std = np . log ( 10 ** std ) return [ std for stddev_type in stddev_types ] | Return standard deviations as defined in equation 3 . 5 . 5 - 1 page 151 |
25,115 | def plot_memory ( calc_id = - 1 ) : dstore = util . read ( calc_id ) plots = [ ] for task_name in dstore [ 'task_info' ] : mem = dstore [ 'task_info/' + task_name ] [ 'mem_gb' ] plots . append ( ( task_name , mem ) ) plt = make_figure ( plots ) plt . show ( ) | Plot the memory occupation |
25,116 | def convert_to_array ( pmap , nsites , imtls , inner_idx = 0 ) : lst = [ ] for imt , imls in imtls . items ( ) : for iml in imls : lst . append ( ( '%s-%s' % ( imt , iml ) , F32 ) ) curves = numpy . zeros ( nsites , numpy . dtype ( lst ) ) for sid , pcurve in pmap . items ( ) : curve = curves [ sid ] idx = 0 for imt , imls in imtls . items ( ) : for iml in imls : curve [ '%s-%s' % ( imt , iml ) ] = pcurve . array [ idx , inner_idx ] idx += 1 return curves | Convert the probability map into a composite array with header of the form PGA - 0 . 1 PGA - 0 . 2 ... |
25,117 | def compute_hazard_maps ( curves , imls , poes ) : poes = numpy . array ( poes ) if len ( poes . shape ) == 0 : poes = poes . reshape ( 1 ) if len ( curves . shape ) == 1 : curves = curves . reshape ( ( 1 , ) + curves . shape ) L = curves . shape [ 1 ] if L != len ( imls ) : raise ValueError ( 'The curves have %d levels, %d were passed' % ( L , len ( imls ) ) ) result = [ ] with warnings . catch_warnings ( ) : warnings . simplefilter ( "ignore" ) imls = numpy . log ( numpy . array ( imls [ : : - 1 ] ) ) for curve in curves : curve_cutoff = [ max ( poe , EPSILON ) for poe in curve [ : : - 1 ] ] hmap_val = [ ] for poe in poes : if poe > curve_cutoff [ - 1 ] : hmap_val . append ( 0 ) else : val = numpy . exp ( numpy . interp ( numpy . log ( poe ) , numpy . log ( curve_cutoff ) , imls ) ) hmap_val . append ( val ) result . append ( hmap_val ) return numpy . array ( result ) | Given a set of hazard curve poes interpolate a hazard map at the specified poe . |
25,118 | def make_hmap ( pmap , imtls , poes ) : M , P = len ( imtls ) , len ( poes ) hmap = probability_map . ProbabilityMap . build ( M , P , pmap , dtype = F32 ) if len ( pmap ) == 0 : return hmap for i , imt in enumerate ( imtls ) : curves = numpy . array ( [ pmap [ sid ] . array [ imtls ( imt ) , 0 ] for sid in pmap . sids ] ) data = compute_hazard_maps ( curves , imtls [ imt ] , poes ) for sid , value in zip ( pmap . sids , data ) : array = hmap [ sid ] . array for j , val in enumerate ( value ) : array [ i , j ] = val return hmap | Compute the hazard maps associated to the passed probability map . |
25,119 | def make_uhs ( hmap , info ) : uhs = numpy . zeros ( len ( hmap ) , info [ 'uhs_dt' ] ) for p , poe in enumerate ( info [ 'poes' ] ) : for m , imt in enumerate ( info [ 'imtls' ] ) : if imt . startswith ( ( 'PGA' , 'SA' ) ) : uhs [ str ( poe ) ] [ imt ] = hmap [ : , m , p ] return uhs | Make Uniform Hazard Spectra curves for each location . |
25,120 | def to_array ( self , ebruptures ) : data = [ ] for ebr in ebruptures : rup = ebr . rupture self . cmaker . add_rup_params ( rup ) ruptparams = tuple ( getattr ( rup , param ) for param in self . params ) point = rup . surface . get_middle_point ( ) multi_lons , multi_lats = rup . surface . get_surface_boundaries ( ) bounds = ',' . join ( '((%s))' % ',' . join ( '%.5f %.5f' % ( lon , lat ) for lon , lat in zip ( lons , lats ) ) for lons , lats in zip ( multi_lons , multi_lats ) ) try : rate = ebr . rupture . occurrence_rate except AttributeError : rate = numpy . nan data . append ( ( ebr . serial , ebr . srcidx , ebr . n_occ , rate , rup . mag , point . x , point . y , point . z , rup . surface . get_strike ( ) , rup . surface . get_dip ( ) , rup . rake , 'MULTIPOLYGON(%s)' % decode ( bounds ) ) + ruptparams ) return numpy . array ( data , self . dt ) | Convert a list of ebruptures into an array of dtype RuptureRata . dt |
25,121 | def save ( self , rup_array ) : self . nruptures += len ( rup_array ) offset = len ( self . datastore [ 'rupgeoms' ] ) rup_array . array [ 'gidx1' ] += offset rup_array . array [ 'gidx2' ] += offset previous = self . datastore . get_attr ( 'ruptures' , 'nbytes' , 0 ) self . datastore . extend ( 'ruptures' , rup_array , nbytes = previous + rup_array . nbytes ) self . datastore . extend ( 'rupgeoms' , rup_array . geom ) self . datastore . flush ( ) | Store the ruptures in array format . |
25,122 | def close ( self ) : if 'ruptures' not in self . datastore : return codes = numpy . unique ( self . datastore [ 'ruptures' ] [ 'code' ] ) attr = { 'code_%d' % code : ' ' . join ( cls . __name__ for cls in BaseRupture . types [ code ] ) for code in codes } self . datastore . set_attrs ( 'ruptures' , ** attr ) | Save information about the rupture codes as attributes of the ruptures dataset . |
25,123 | def _compute_nonlinear_magnitude_term ( self , C , mag ) : return self . _compute_linear_magnitude_term ( C , mag ) + C [ "b3" ] * ( ( mag - 7.0 ) ** 2. ) | Computes the non - linear magnitude term |
25,124 | def _compute_magnitude_distance_term ( self , C , rjb , mag ) : rval = np . sqrt ( rjb ** 2. + C [ "h" ] ** 2. ) return ( C [ "b4" ] + C [ "b5" ] * ( mag - 4.5 ) ) * np . log ( rval ) | Returns the magntude dependent distance term |
25,125 | def _get_bnl ( self , C_AMP , vs30 ) : bnl = np . zeros_like ( vs30 ) if np . all ( vs30 >= self . CONSTS [ "Vref" ] ) : return bnl bnl [ vs30 < self . CONSTS [ "v1" ] ] = C_AMP [ "b1sa" ] idx = np . logical_and ( vs30 > self . CONSTS [ "v1" ] , vs30 <= self . CONSTS [ "v2" ] ) if np . any ( idx ) : bnl [ idx ] = ( C_AMP [ "b1sa" ] - C_AMP [ "b2sa" ] ) * ( np . log ( vs30 [ idx ] / self . CONSTS [ "v2" ] ) / np . log ( self . CONSTS [ "v1" ] / self . CONSTS [ "v2" ] ) ) + C_AMP [ "b2sa" ] idx = np . logical_and ( vs30 > self . CONSTS [ "v2" ] , vs30 < self . CONSTS [ "Vref" ] ) if np . any ( idx ) : bnl [ idx ] = C_AMP [ "b2sa" ] * np . log ( vs30 [ idx ] / self . CONSTS [ "Vref" ] ) / np . log ( self . CONSTS [ "v2" ] / self . CONSTS [ "Vref" ] ) return bnl | Gets the nonlinear term given by equation 8 of Atkinson & Boore 2006 |
25,126 | def _get_stddevs ( self , C , stddev_types , stddev_shape ) : stddevs = [ ] for stddev_type in stddev_types : assert stddev_type in self . DEFINED_FOR_STANDARD_DEVIATION_TYPES if stddev_type == const . StdDev . TOTAL : stddevs . append ( C [ "sigtot" ] + np . zeros ( stddev_shape ) ) elif stddev_type == const . StdDev . INTRA_EVENT : stddevs . append ( C [ 'sig2' ] + np . zeros ( stddev_shape ) ) elif stddev_type == const . StdDev . INTER_EVENT : stddevs . append ( C [ 'sig1' ] + np . zeros ( stddev_shape ) ) return stddevs | Returns the standard deviations given in Table 2 |
25,127 | def tidy ( fnames ) : for fname in fnames : try : node = nrml . read ( fname ) except ValueError as err : print ( err ) return with open ( fname + '.bak' , 'wb' ) as f : f . write ( open ( fname , 'rb' ) . read ( ) ) with open ( fname , 'wb' ) as f : nrml . write ( node . nodes , f , writers . FIVEDIGITS , xmlns = node [ 'xmlns' ] ) print ( 'Reformatted %s, original left in %s.bak' % ( fname , fname ) ) | Reformat a NRML file in a canonical form . That also means reducing the precision of the floats to a standard value . If the file is invalid a clear error message is shown . |
25,128 | def to_hdf5 ( input ) : with performance . Monitor ( 'to_hdf5' ) as mon : for input_file in input : if input_file . endswith ( '.npz' ) : output = convert_npz_hdf5 ( input_file , input_file [ : - 3 ] + 'hdf5' ) elif input_file . endswith ( '.xml' ) : output = convert_xml_hdf5 ( input_file , input_file [ : - 3 ] + 'hdf5' ) else : continue print ( 'Generated %s' % output ) print ( mon ) | Convert . xml and . npz files to . hdf5 files . |
25,129 | def get_rup_array ( ebruptures , srcfilter = nofilter ) : if not BaseRupture . _code : BaseRupture . init ( ) rups = [ ] geoms = [ ] nbytes = 0 offset = 0 for ebrupture in ebruptures : rup = ebrupture . rupture mesh = surface_to_array ( rup . surface ) sy , sz = mesh . shape [ 1 : ] assert sy < TWO16 , 'Too many multisurfaces: %d' % sy assert sz < TWO16 , 'The rupture mesh spacing is too small' points = mesh . reshape ( 3 , - 1 ) . T minlon = points [ : , 0 ] . min ( ) minlat = points [ : , 1 ] . min ( ) maxlon = points [ : , 0 ] . max ( ) maxlat = points [ : , 1 ] . max ( ) if srcfilter . integration_distance and len ( srcfilter . close_sids ( ( minlon , minlat , maxlon , maxlat ) , rup . tectonic_region_type , rup . mag ) ) == 0 : continue hypo = rup . hypocenter . x , rup . hypocenter . y , rup . hypocenter . z rate = getattr ( rup , 'occurrence_rate' , numpy . nan ) tup = ( ebrupture . serial , ebrupture . srcidx , ebrupture . grp_id , rup . code , ebrupture . n_occ , rup . mag , rup . rake , rate , minlon , minlat , maxlon , maxlat , hypo , offset , offset + len ( points ) , sy , sz ) offset += len ( points ) rups . append ( tup ) geoms . append ( numpy . array ( [ tuple ( p ) for p in points ] , point3d ) ) nbytes += rupture_dt . itemsize + mesh . nbytes if not rups : return ( ) dic = dict ( geom = numpy . concatenate ( geoms ) , nbytes = nbytes ) return hdf5 . ArrayWrapper ( numpy . array ( rups , rupture_dt ) , dic ) | Convert a list of EBRuptures into a numpy composite array by filtering out the ruptures far away from every site |
25,130 | def sample_cluster ( sources , srcfilter , num_ses , param ) : eb_ruptures = [ ] numpy . random . seed ( sources [ 0 ] . serial ) [ grp_id ] = set ( src . src_group_id for src in sources ) calc_times = AccumDict ( accum = numpy . zeros ( 3 , numpy . float32 ) ) samples = getattr ( sources [ 0 ] , 'samples' , 1 ) tom = getattr ( sources , 'temporal_occurrence_model' ) rate = tom . occurrence_rate time_span = tom . time_span grp_num_occ = numpy . random . poisson ( rate * time_span * samples * num_ses ) rup_counter = { } rup_data = { } eff_ruptures = 0 for rlz_num in range ( grp_num_occ ) : if sources . cluster : for src , _sites in srcfilter ( sources ) : if rlz_num == 0 : eff_ruptures += src . num_ruptures t0 = time . time ( ) rup = src . get_one_rupture ( ) if src . id not in rup_counter : rup_counter [ src . id ] = { } rup_data [ src . id ] = { } if rup . idx not in rup_counter [ src . id ] : rup_counter [ src . id ] [ rup . idx ] = 1 rup_data [ src . id ] [ rup . idx ] = [ rup , src . id , grp_id ] else : rup_counter [ src . id ] [ rup . idx ] += 1 dt = time . time ( ) - t0 calc_times [ src . id ] += numpy . array ( [ len ( rup_data [ src . id ] ) , src . nsites , dt ] ) elif param [ 'src_interdep' ] == 'mutex' : print ( 'Not yet implemented' ) exit ( 0 ) for src_key in rup_data : for rup_key in rup_data [ src_key ] : dat = rup_data [ src_key ] [ rup_key ] cnt = rup_counter [ src_key ] [ rup_key ] ebr = EBRupture ( dat [ 0 ] , dat [ 1 ] , dat [ 2 ] , cnt , samples ) eb_ruptures . append ( ebr ) return eb_ruptures , calc_times , eff_ruptures , grp_id | Yields ruptures generated by a cluster of sources . |
25,131 | def plot_sites ( calc_id = - 1 ) : import matplotlib . pyplot as p dstore = util . read ( calc_id ) sitecol = dstore [ 'sitecol' ] lons , lats = sitecol . lons , sitecol . lats if len ( lons ) > 1 and cross_idl ( * lons ) : lons %= 360 fig , ax = p . subplots ( ) ax . grid ( True ) if 'site_model' in dstore : sm = dstore [ 'site_model' ] sm_lons , sm_lats = sm [ 'lon' ] , sm [ 'lat' ] if len ( sm_lons ) > 1 and cross_idl ( * sm_lons ) : sm_lons %= 360 p . scatter ( sm_lons , sm_lats , marker = '.' , color = 'orange' ) p . scatter ( lons , lats , marker = '+' ) p . show ( ) | Plot the sites |
25,132 | def _get_distance_term ( self , C , rrup , backarc ) : distance_scale = - np . log10 ( np . sqrt ( rrup ** 2 + 3600.0 ) ) distance_scale [ backarc ] += ( C [ "c2" ] * rrup [ backarc ] ) idx = np . logical_not ( backarc ) distance_scale [ idx ] += ( C [ "c1" ] * rrup [ idx ] ) return distance_scale | Returns the distance scaling term which varies depending on whether the site is in the forearc or the backarc |
25,133 | def _get_scaling_term ( self , C , rrup ) : a_f = 0.15 + 0.0007 * rrup a_f [ a_f > 0.35 ] = 0.35 return C [ "af" ] + a_f | Applies the Cascadia correction factor from Table 2 and the positive correction factor given on Page 567 |
25,134 | def _compute_mean ( self , C , mag , rjb ) : m1 = 6.4 r1 = 50. h = 6. R = np . sqrt ( rjb ** 2 + h ** 2 ) R1 = np . sqrt ( r1 ** 2 + h ** 2 ) less_r1 = rjb < r1 ge_r1 = rjb >= r1 mean = ( C [ 'c1' ] + C [ 'c4' ] * ( mag - m1 ) * np . log ( R ) + C [ 'c5' ] * rjb + C [ 'c8' ] * ( 8.5 - mag ) ** 2 ) mean [ less_r1 ] += C [ 'c3' ] * np . log ( R [ less_r1 ] ) mean [ ge_r1 ] += ( C [ 'c3' ] * np . log ( R1 ) + C [ 'c6' ] * ( np . log ( R [ ge_r1 ] ) - np . log ( R1 ) ) ) if mag < m1 : mean += C [ 'c2' ] * ( mag - m1 ) else : mean += C [ 'c7' ] * ( mag - m1 ) return mean | Compute mean value see table 2 . |
25,135 | def get_weichert_factor ( beta , cmag , cyear , end_year ) : if len ( cmag ) > 1 : dmag = ( cmag [ 1 : ] + cmag [ : - 1 ] ) / 2. cval = np . hstack ( [ dmag , cmag [ - 1 ] + ( dmag [ - 1 ] - cmag [ - 2 ] ) ] ) else : return 1.0 / ( end_year - cyear [ 0 ] + 1 ) , None t_f = sum ( np . exp ( - beta * cval ) ) / sum ( ( end_year - cyear + 1 ) * np . exp ( - beta * cval ) ) return t_f , cval | Gets the Weichert adjustment factor for each the magnitude bins |
25,136 | def get_even_magnitude_completeness ( completeness_table , catalogue = None ) : mmax = np . floor ( 10. * np . max ( catalogue . data [ 'magnitude' ] ) ) / 10. check_completeness_table ( completeness_table , catalogue ) cmag = np . hstack ( [ completeness_table [ : , 1 ] , mmax + 0.1 ] ) cyear = np . hstack ( [ completeness_table [ : , 0 ] , completeness_table [ - 1 , 0 ] ] ) if np . shape ( completeness_table ) [ 0 ] == 1 : return completeness_table , 0.1 for iloc in range ( 0 , len ( cmag ) - 1 ) : mrange = np . arange ( np . floor ( 10. * cmag [ iloc ] ) / 10. , ( np . ceil ( 10. * cmag [ iloc + 1 ] ) / 10. ) , 0.1 ) temp_table = np . column_stack ( [ cyear [ iloc ] * np . ones ( len ( mrange ) , dtype = float ) , mrange ] ) if iloc == 0 : completeness_table = np . copy ( temp_table ) else : completeness_table = np . vstack ( [ completeness_table , temp_table ] ) return completeness_table , 0.1 | To make the magnitudes evenly spaced render to a constant 0 . 1 magnitude unit |
25,137 | def unique ( objects , key = None ) : dupl = [ ] for obj , group in itertools . groupby ( sorted ( objects ) , key ) : if sum ( 1 for _ in group ) > 1 : dupl . append ( obj ) if dupl : raise ValueError ( 'Found duplicates %s' % dupl ) return objects | Raise a ValueError if there is a duplicated object otherwise returns the objects as they are . |
25,138 | def sample ( weighted_objects , num_samples , seed ) : weights = [ ] for obj in weighted_objects : w = obj . weight if isinstance ( obj . weight , float ) : weights . append ( w ) else : weights . append ( w [ 'weight' ] ) numpy . random . seed ( seed ) idxs = numpy . random . choice ( len ( weights ) , num_samples , p = weights ) return [ weighted_objects [ idx ] for idx in idxs ] | Take random samples of a sequence of weighted objects |
25,139 | def collect_info ( smlt ) : n = nrml . read ( smlt ) try : blevels = n . logicTree except Exception : raise InvalidFile ( '%s is not a valid source_model_logic_tree_file' % smlt ) paths = collections . defaultdict ( set ) applytosources = collections . defaultdict ( list ) for blevel in blevels : for bset in blevel : if 'applyToSources' in bset . attrib : applytosources [ bset [ 'branchSetID' ] ] . extend ( bset [ 'applyToSources' ] . split ( ) ) for br in bset : with node . context ( smlt , br ) : fnames = unique ( br . uncertaintyModel . text . split ( ) ) paths [ br [ 'branchID' ] ] . update ( get_paths ( smlt , fnames ) ) return Info ( { k : sorted ( v ) for k , v in paths . items ( ) } , applytosources ) | Given a path to a source model logic tree collect all of the path names to the source models it contains and build 1 . a dictionary source model branch ID - > paths 2 . a dictionary source model branch ID - > source IDs in applyToSources |
25,140 | def toml ( uncertainty ) : text = uncertainty . text . strip ( ) if not text . startswith ( '[' ) : text = '[%s]' % text for k , v in uncertainty . attrib . items ( ) : try : v = ast . literal_eval ( v ) except ValueError : v = repr ( v ) text += '\n%s = %s' % ( k , v ) return text | Converts an uncertainty node into a TOML string |
25,141 | def name ( self ) : names = self . names . split ( ) if len ( names ) == 1 : return names [ 0 ] elif len ( names ) == 2 : return ' ' . join ( names ) else : return ' ' . join ( [ names [ 0 ] , '...' , names [ - 1 ] ] ) | Compact representation for the names |
25,142 | def get_skeleton ( self ) : src_groups = [ ] for grp in self . src_groups : sg = copy . copy ( grp ) sg . sources = [ ] src_groups . append ( sg ) return self . __class__ ( self . names , self . weight , self . path , src_groups , self . num_gsim_paths , self . ordinal , self . samples ) | Return an empty copy of the source model i . e . without sources but with the proper attributes for each SourceGroup contained within . |
25,143 | def enumerate_paths ( self ) : for path in self . _enumerate_paths ( [ ] ) : flat_path = [ ] weight = 1.0 while path : path , branch = path weight *= branch . weight flat_path . append ( branch ) yield weight , flat_path [ : : - 1 ] | Generate all possible paths starting from this branch set . |
25,144 | def filter_source ( self , source ) : for key , value in self . filters . items ( ) : if key == 'applyToTectonicRegionType' : if value != source . tectonic_region_type : return False elif key == 'applyToSourceType' : if value == 'area' : if not isinstance ( source , ohs . AreaSource ) : return False elif value == 'point' : if ( not isinstance ( source , ohs . PointSource ) or isinstance ( source , ohs . AreaSource ) ) : return False elif value == 'simpleFault' : if not isinstance ( source , ohs . SimpleFaultSource ) : return False elif value == 'complexFault' : if not isinstance ( source , ohs . ComplexFaultSource ) : return False elif value == 'characteristicFault' : if not isinstance ( source , ohs . CharacteristicFaultSource ) : return False else : raise AssertionError ( "unknown source type '%s'" % value ) elif key == 'applyToSources' : if source and source . source_id not in value : return False else : raise AssertionError ( "unknown filter '%s'" % key ) return True | Apply filters to source and return True if uncertainty should be applied to it . |
25,145 | def _apply_uncertainty_to_geometry ( self , source , value ) : if self . uncertainty_type == 'simpleFaultDipRelative' : source . modify ( 'adjust_dip' , dict ( increment = value ) ) elif self . uncertainty_type == 'simpleFaultDipAbsolute' : source . modify ( 'set_dip' , dict ( dip = value ) ) elif self . uncertainty_type == 'simpleFaultGeometryAbsolute' : trace , usd , lsd , dip , spacing = value source . modify ( 'set_geometry' , dict ( fault_trace = trace , upper_seismogenic_depth = usd , lower_seismogenic_depth = lsd , dip = dip , spacing = spacing ) ) elif self . uncertainty_type == 'complexFaultGeometryAbsolute' : edges , spacing = value source . modify ( 'set_geometry' , dict ( edges = edges , spacing = spacing ) ) elif self . uncertainty_type == 'characteristicFaultGeometryAbsolute' : source . modify ( 'set_geometry' , dict ( surface = value ) ) | Modify source geometry with the uncertainty value value |
25,146 | def _apply_uncertainty_to_mfd ( self , mfd , value ) : if self . uncertainty_type == 'abGRAbsolute' : a , b = value mfd . modify ( 'set_ab' , dict ( a_val = a , b_val = b ) ) elif self . uncertainty_type == 'bGRRelative' : mfd . modify ( 'increment_b' , dict ( value = value ) ) elif self . uncertainty_type == 'maxMagGRRelative' : mfd . modify ( 'increment_max_mag' , dict ( value = value ) ) elif self . uncertainty_type == 'maxMagGRAbsolute' : mfd . modify ( 'set_max_mag' , dict ( value = value ) ) elif self . uncertainty_type == 'incrementalMFDAbsolute' : min_mag , bin_width , occur_rates = value mfd . modify ( 'set_mfd' , dict ( min_mag = min_mag , bin_width = bin_width , occurrence_rates = occur_rates ) ) | Modify mfd object with uncertainty value value . |
25,147 | def gen_source_models ( self , gsim_lt ) : num_gsim_paths = 1 if self . num_samples else gsim_lt . get_num_paths ( ) for i , rlz in enumerate ( self ) : yield LtSourceModel ( rlz . value , rlz . weight , ( 'b1' , ) , [ ] , num_gsim_paths , i , 1 ) | Yield the underlying LtSourceModel multiple times if there is sampling |
25,148 | def on_each_source ( self ) : return ( self . info . applytosources and self . info . applytosources == self . source_ids ) | True if there is an applyToSources for each source . |
25,149 | def parse_tree ( self , tree_node , validate ) : self . info = collect_info ( self . filename ) self . source_ids = collections . defaultdict ( list ) t0 = time . time ( ) for depth , branchinglevel_node in enumerate ( tree_node . nodes ) : self . parse_branchinglevel ( branchinglevel_node , depth , validate ) dt = time . time ( ) - t0 if validate : bname = os . path . basename ( self . filename ) logging . info ( 'Validated %s in %.2f seconds' , bname , dt ) | Parse the whole tree and point root_branchset attribute to the tree s root . |
25,150 | def parse_branchinglevel ( self , branchinglevel_node , depth , validate ) : new_open_ends = set ( ) branchsets = branchinglevel_node . nodes for number , branchset_node in enumerate ( branchsets ) : branchset = self . parse_branchset ( branchset_node , depth , number , validate ) self . parse_branches ( branchset_node , branchset , validate ) if self . root_branchset is None : self . num_paths = 1 self . root_branchset = branchset else : self . apply_branchset ( branchset_node , branchset ) for branch in branchset . branches : new_open_ends . add ( branch ) self . num_paths *= len ( branchset . branches ) if number > 0 : logging . warning ( 'There is a branching level with multiple ' 'branchsets in %s' , self . filename ) self . open_ends . clear ( ) self . open_ends . update ( new_open_ends ) | Parse one branching level . |
25,151 | def parse_branches ( self , branchset_node , branchset , validate ) : weight_sum = 0 branches = branchset_node . nodes values = [ ] for branchnode in branches : weight = ~ branchnode . uncertaintyWeight weight_sum += weight value_node = node_from_elem ( branchnode . uncertaintyModel ) if value_node . text is not None : values . append ( value_node . text . strip ( ) ) if validate : self . validate_uncertainty_value ( value_node , branchnode , branchset ) value = self . parse_uncertainty_value ( value_node , branchset ) branch_id = branchnode . attrib . get ( 'branchID' ) branch = Branch ( branch_id , weight , value ) if branch_id in self . branches : raise LogicTreeError ( branchnode , self . filename , "branchID '%s' is not unique" % branch_id ) self . branches [ branch_id ] = branch branchset . branches . append ( branch ) if abs ( weight_sum - 1.0 ) > pmf . PRECISION : raise LogicTreeError ( branchset_node , self . filename , "branchset weights don't sum up to 1.0" ) if len ( set ( values ) ) < len ( values ) : raise LogicTreeError ( branchset_node , self . filename , "there are duplicate values in uncertaintyModel: " + ' ' . join ( values ) ) | Create and attach branches at branchset_node to branchset . |
25,152 | def sample_path ( self , seed ) : branchset = self . root_branchset branch_ids = [ ] while branchset is not None : [ branch ] = sample ( branchset . branches , 1 , seed ) branch_ids . append ( branch . branch_id ) branchset = branch . child_branchset modelname = self . root_branchset . get_branch_by_id ( branch_ids [ 0 ] ) . value return modelname , branch_ids | Return the model name and a list of branch ids . |
25,153 | def _parse_simple_fault_geometry_surface ( self , node ) : spacing = node [ "spacing" ] usd , lsd , dip = ( ~ node . upperSeismoDepth , ~ node . lowerSeismoDepth , ~ node . dip ) coords = split_coords_2d ( ~ node . LineString . posList ) trace = geo . Line ( [ geo . Point ( * p ) for p in coords ] ) return trace , usd , lsd , dip , spacing | Parses a simple fault geometry surface |
25,154 | def _parse_complex_fault_geometry_surface ( self , node ) : spacing = node [ "spacing" ] edges = [ ] for edge_node in node . nodes : coords = split_coords_3d ( ~ edge_node . LineString . posList ) edges . append ( geo . Line ( [ geo . Point ( * p ) for p in coords ] ) ) return edges , spacing | Parses a complex fault geometry surface |
25,155 | def _parse_planar_geometry_surface ( self , node ) : nodes = [ ] for key in [ "topLeft" , "topRight" , "bottomRight" , "bottomLeft" ] : nodes . append ( geo . Point ( getattr ( node , key ) [ "lon" ] , getattr ( node , key ) [ "lat" ] , getattr ( node , key ) [ "depth" ] ) ) top_left , top_right , bottom_right , bottom_left = tuple ( nodes ) return geo . PlanarSurface . from_corner_points ( top_left , top_right , bottom_right , bottom_left ) | Parses a planar geometry surface |
25,156 | def _validate_simple_fault_geometry ( self , node , _float_re ) : try : coords = split_coords_2d ( ~ node . LineString . posList ) trace = geo . Line ( [ geo . Point ( * p ) for p in coords ] ) except ValueError : trace = [ ] if len ( trace ) : return raise LogicTreeError ( node , self . filename , "'simpleFaultGeometry' node is not valid" ) | Validates a node representation of a simple fault geometry |
25,157 | def _validate_complex_fault_geometry ( self , node , _float_re ) : valid_edges = [ ] for edge_node in node . nodes : try : coords = split_coords_3d ( edge_node . LineString . posList . text ) edge = geo . Line ( [ geo . Point ( * p ) for p in coords ] ) except ValueError : edge = [ ] if len ( edge ) : valid_edges . append ( True ) else : valid_edges . append ( False ) if node [ "spacing" ] and all ( valid_edges ) : return raise LogicTreeError ( node , self . filename , "'complexFaultGeometry' node is not valid" ) | Validates a node representation of a complex fault geometry - this check merely verifies that the format is correct . If the geometry does not conform to the Aki & Richards convention this will not be verified here but will raise an error when the surface is created . |
25,158 | def _validate_planar_fault_geometry ( self , node , _float_re ) : valid_spacing = node [ "spacing" ] for key in [ "topLeft" , "topRight" , "bottomLeft" , "bottomRight" ] : lon = getattr ( node , key ) [ "lon" ] lat = getattr ( node , key ) [ "lat" ] depth = getattr ( node , key ) [ "depth" ] valid_lon = ( lon >= - 180.0 ) and ( lon <= 180.0 ) valid_lat = ( lat >= - 90.0 ) and ( lat <= 90.0 ) valid_depth = ( depth >= 0.0 ) is_valid = valid_lon and valid_lat and valid_depth if not is_valid or not valid_spacing : raise LogicTreeError ( node , self . filename , "'planarFaultGeometry' node is not valid" ) | Validares a node representation of a planar fault geometry |
25,159 | def apply_uncertainties ( self , branch_ids , source_group ) : branchset = self . root_branchset branchsets_and_uncertainties = [ ] branch_ids = list ( branch_ids [ : : - 1 ] ) while branchset is not None : branch = branchset . get_branch_by_id ( branch_ids . pop ( - 1 ) ) if not branchset . uncertainty_type == 'sourceModel' : branchsets_and_uncertainties . append ( ( branchset , branch . value ) ) branchset = branch . child_branchset if not branchsets_and_uncertainties : return source_group sg = copy . deepcopy ( source_group ) sg . applied_uncertainties = [ ] sg . changed = numpy . zeros ( len ( sg . sources ) , int ) for branchset , value in branchsets_and_uncertainties : for s , source in enumerate ( sg . sources ) : changed = branchset . apply_uncertainty ( value , source ) if changed : sg . changed [ s ] += changed sg . applied_uncertainties . append ( ( branchset . uncertainty_type , value ) ) return sg | Parse the path through the source model logic tree and return apply uncertainties function . |
25,160 | def is_one ( self ) : return all ( abs ( v - 1. ) < pmf . PRECISION for v in self . dic . values ( ) ) | Check that all the inner weights are 1 up to the precision |
25,161 | def from_ ( cls , gsim ) : ltbranch = N ( 'logicTreeBranch' , { 'branchID' : 'b1' } , nodes = [ N ( 'uncertaintyModel' , text = str ( gsim ) ) , N ( 'uncertaintyWeight' , text = '1.0' ) ] ) lt = N ( 'logicTree' , { 'logicTreeID' : 'lt1' } , nodes = [ N ( 'logicTreeBranchingLevel' , { 'branchingLevelID' : 'bl1' } , nodes = [ N ( 'logicTreeBranchSet' , { 'applyToTectonicRegionType' : '*' , 'branchSetID' : 'bs1' , 'uncertaintyType' : 'gmpeModel' } , nodes = [ ltbranch ] ) ] ) ] ) return cls ( repr ( gsim ) , [ '*' ] , ltnode = lt ) | Generate a trivial GsimLogicTree from a single GSIM instance . |
25,162 | def check_imts ( self , imts ) : for trt in self . values : for gsim in self . values [ trt ] : for attr in dir ( gsim ) : coeffs = getattr ( gsim , attr ) if not isinstance ( coeffs , CoeffsTable ) : continue for imt in imts : if imt . startswith ( 'SA' ) : try : coeffs [ from_string ( imt ) ] except KeyError : raise ValueError ( '%s is out of the period range defined ' 'for %s' % ( imt , gsim ) ) | Make sure the IMTs are recognized by all GSIMs in the logic tree |
25,163 | def reduce ( self , trts ) : new = object . __new__ ( self . __class__ ) vars ( new ) . update ( vars ( self ) ) if trts != { '*' } : new . branches = [ ] for br in self . branches : branch = BranchTuple ( br . trt , br . id , br . gsim , br . weight , br . trt in trts ) new . branches . append ( branch ) return new | Reduce the GsimLogicTree . |
25,164 | def get_num_branches ( self ) : num = { } for trt , branches in itertools . groupby ( self . branches , operator . attrgetter ( 'trt' ) ) : num [ trt ] = sum ( 1 for br in branches if br . effective ) return num | Return the number of effective branches for tectonic region type as a dictionary . |
25,165 | def get_num_paths ( self ) : num_branches = self . get_num_branches ( ) if not sum ( num_branches . values ( ) ) : return 0 num = 1 for val in num_branches . values ( ) : if val : num *= val return num | Return the effective number of paths in the tree . |
25,166 | def _compute_faulting_style_term ( Frss , pR , Fnss , pN , rake ) : if rake > 30.0 and rake <= 150.0 : return np . power ( Frss , 1 - pR ) * np . power ( Fnss , - pN ) elif rake > - 120.0 and rake <= - 60.0 : return np . power ( Frss , - pR ) * np . power ( Fnss , 1 - pN ) else : return np . power ( Frss , - pR ) * np . power ( Fnss , - pN ) | Compute SHARE faulting style adjustment term . |
25,167 | def _get_stddevs ( self , C , stddev_types , mag , num_sites ) : stddevs = [ ] for _ in stddev_types : if mag < 7.16 : sigma = C [ 'c11' ] + C [ 'c12' ] * mag elif mag >= 7.16 : sigma = C [ 'c13' ] stddevs . append ( np . zeros ( num_sites ) + sigma ) return stddevs | Return total standard deviation as for equation 35 page 1021 . |
25,168 | def _compute_term2 ( self , C , mag , rrup ) : c78_factor = ( C [ 'c7' ] * np . exp ( C [ 'c8' ] * mag ) ) ** 2 R = np . sqrt ( rrup ** 2 + c78_factor ) return C [ 'c4' ] * np . log ( R ) + ( C [ 'c5' ] + C [ 'c6' ] * mag ) * rrup | This computes the term f2 in equation 32 page 1021 |
25,169 | def _compute_term3 ( self , C , rrup ) : f3 = np . zeros_like ( rrup ) idx_between_70_130 = ( rrup > 70 ) & ( rrup <= 130 ) idx_greater_130 = rrup > 130 f3 [ idx_between_70_130 ] = ( C [ 'c9' ] * ( np . log ( rrup [ idx_between_70_130 ] ) - np . log ( 70 ) ) ) f3 [ idx_greater_130 ] = ( C [ 'c9' ] * ( np . log ( rrup [ idx_greater_130 ] ) - np . log ( 70 ) ) + C [ 'c10' ] * ( np . log ( rrup [ idx_greater_130 ] ) - np . log ( 130 ) ) ) return f3 | This computes the term f3 in equation 34 page 1021 but corrected according to the erratum . |
25,170 | def time_window_cutoff ( sw_time , time_cutoff ) : sw_time = np . array ( [ ( time_cutoff / DAYS ) if x > ( time_cutoff / DAYS ) else x for x in sw_time ] ) return ( sw_time ) | Allows for cutting the declustering time window at a specific time outside of which an event of any magnitude is no longer identified as a cluster |
25,171 | def create_geometry ( self , input_geometry , upper_depth , lower_depth ) : self . _check_seismogenic_depths ( upper_depth , lower_depth ) if not isinstance ( input_geometry , Point ) : if not isinstance ( input_geometry , np . ndarray ) : raise ValueError ( 'Unrecognised or unsupported geometry ' 'definition' ) self . geometry = Point ( input_geometry [ 0 ] , input_geometry [ 1 ] ) else : self . geometry = input_geometry | If geometry is defined as a numpy array then create instance of nhlib . geo . point . Point class otherwise if already instance of class accept class |
25,172 | def select_catalogue ( self , selector , distance , selector_type = 'circle' , distance_metric = 'epicentral' , point_depth = None , upper_eq_depth = None , lower_eq_depth = None ) : if selector . catalogue . get_number_events ( ) < 1 : raise ValueError ( 'No events found in catalogue!' ) if 'square' in selector_type : self . select_catalogue_within_cell ( selector , distance , upper_depth = upper_eq_depth , lower_depth = lower_eq_depth ) elif 'circle' in selector_type : self . select_catalogue_within_distance ( selector , distance , distance_metric , point_depth ) else : raise ValueError ( 'Unrecognised selection type for point source!' ) | Selects the catalogue associated to the point source . Effectively a wrapper to the two functions select catalogue within a distance of the point and select catalogue within cell centred on point |
25,173 | def plot ( what , calc_id = - 1 , other_id = None , webapi = False ) : if '?' not in what : raise SystemExit ( 'Missing ? in %r' % what ) prefix , rest = what . split ( '?' , 1 ) assert prefix in 'source_geom hcurves hmaps uhs' , prefix if prefix in 'hcurves hmaps' and 'imt=' not in rest : raise SystemExit ( 'Missing imt= in %r' % what ) elif prefix == 'uhs' and 'imt=' in rest : raise SystemExit ( 'Invalid IMT in %r' % what ) elif prefix in 'hcurves uhs' and 'site_id=' not in rest : what += '&site_id=0' if webapi : xs = [ WebExtractor ( calc_id ) ] if other_id : xs . append ( WebExtractor ( other_id ) ) else : xs = [ Extractor ( calc_id ) ] if other_id : xs . append ( Extractor ( other_id ) ) make_figure = globals ( ) [ 'make_figure_' + prefix ] plt = make_figure ( xs , what ) plt . show ( ) | Generic plotter for local and remote calculations . |
25,174 | def get_mean_and_stddevs ( self , sctx , rctx , dctx , imt , stddev_types ) : return self . kwargs [ str ( imt ) ] . get_mean_and_stddevs ( sctx , rctx , dctx , imt , stddev_types ) | Call the get mean and stddevs of the GMPE for the respective IMT |
25,175 | def plot_ac ( calc_id ) : dstore = util . read ( calc_id ) agg_curve = dstore [ 'agg_curve-rlzs' ] plt = make_figure ( agg_curve ) plt . show ( ) | Aggregate loss curves plotter . |
25,176 | def _get_stddevs ( self , C , distance , stddev_types ) : stddevs = [ ] for stddev_type in stddev_types : assert stddev_type in self . DEFINED_FOR_STANDARD_DEVIATION_TYPES if stddev_type == const . StdDev . TOTAL : sigma = C [ "s1" ] + ( C [ "s2" ] / ( 1.0 + ( ( distance / C [ "s3" ] ) ** 2. ) ) ) stddevs . append ( sigma + np . zeros_like ( distance ) ) return stddevs | Returns the total standard deviation which is a function of distance |
25,177 | def _compute_mean ( self , C , mag , rrup ) : R1 = 90. R2 = 150. m_ref = mag - 4 r1 = R1 + C [ 'c8' ] * m_ref r2 = R2 + C [ 'c11' ] * m_ref assert r1 > 0 assert r2 > 0 g0 = np . log10 ( np . sqrt ( np . minimum ( rrup , r1 ) ** 2 + ( 1 + C [ 'c5' ] * m_ref ) ** 2 ) ) g1 = np . maximum ( np . log10 ( rrup / r1 ) , 0 ) g2 = np . maximum ( np . log10 ( rrup / r2 ) , 0 ) mean = ( C [ 'c0' ] + C [ 'c1' ] * m_ref + C [ 'c2' ] * m_ref ** 2 + ( C [ 'c3' ] + C [ 'c4' ] * m_ref ) * g0 + ( C [ 'c6' ] + C [ 'c7' ] * m_ref ) * g1 + ( C [ 'c9' ] + C [ 'c10' ] * m_ref ) * g2 ) mean = np . log ( ( 10 ** mean ) * 1e-2 / g ) return mean | Compute mean value according to equation 18 page 32 . |
25,178 | def extract ( what , calc_id , webapi = True ) : with performance . Monitor ( 'extract' , measuremem = True ) as mon : if webapi : obj = WebExtractor ( calc_id ) . get ( what ) else : obj = Extractor ( calc_id ) . get ( what ) fname = '%s_%d.hdf5' % ( what . replace ( '/' , '-' ) . replace ( '?' , '-' ) , calc_id ) obj . save ( fname ) print ( 'Saved' , fname ) if mon . duration > 1 : print ( mon ) | Extract an output from the datastore and save it into an . hdf5 file . By default uses the WebAPI otherwise the extraction is done locally . |
25,179 | def get_regionalisation ( self , strain_model ) : self . strain = strain_model self . strain . data [ 'region' ] = np . array ( [ 'IPL' for _ in range ( self . strain . get_number_observations ( ) ) ] , dtype = '|S13' ) self . strain . data [ 'area' ] = np . array ( [ np . nan for _ in range ( self . strain . get_number_observations ( ) ) ] ) regional_model = self . define_kreemer_regionalisation ( ) for polygon in regional_model : self . _point_in_tectonic_region ( polygon ) return self . strain | Gets the tectonic region type for every element inside the strain model |
25,180 | def define_kreemer_regionalisation ( self , north = 90. , south = - 90. , east = 180. , west = - 180. ) : input_data = getlines ( self . filename ) kreemer_polygons = [ ] for line_loc , line in enumerate ( input_data ) : if '>' in line [ 0 ] : polygon_dict = { } primary_data = line [ 2 : ] . rstrip ( '\n' ) primary_data = primary_data . split ( ' ' , 1 ) polygon_dict [ 'region_type' ] = primary_data [ 0 ] . strip ( ' ' ) polygon_dict [ 'area' ] = float ( primary_data [ 1 ] . strip ( ' ' ) ) polygon_dict [ 'cell' ] = _build_kreemer_cell ( input_data , line_loc ) polygon_dict [ 'long_lims' ] = np . array ( [ np . min ( polygon_dict [ 'cell' ] [ : , 0 ] ) , np . max ( polygon_dict [ 'cell' ] [ : , 0 ] ) ] ) polygon_dict [ 'lat_lims' ] = np . array ( [ np . min ( polygon_dict [ 'cell' ] [ : , 1 ] ) , np . max ( polygon_dict [ 'cell' ] [ : , 1 ] ) ] ) polygon_dict [ 'cell' ] = None if polygon_dict [ 'long_lims' ] [ 0 ] >= 180.0 : polygon_dict [ 'long_lims' ] = polygon_dict [ 'long_lims' ] - 360.0 valid_check = [ polygon_dict [ 'long_lims' ] [ 0 ] >= west , polygon_dict [ 'long_lims' ] [ 1 ] <= east , polygon_dict [ 'lat_lims' ] [ 0 ] >= south , polygon_dict [ 'lat_lims' ] [ 1 ] <= north ] if all ( valid_check ) : kreemer_polygons . append ( polygon_dict ) return kreemer_polygons | Applies the regionalisation defined according to the regionalisation typology of Corne Kreemer |
25,181 | def urlextract ( url , fname ) : with urlopen ( url ) as f : data = io . BytesIO ( f . read ( ) ) with zipfile . ZipFile ( data ) as z : try : return z . open ( fname ) except KeyError : zinfo = z . filelist [ 0 ] if zinfo . filename . endswith ( fname ) : return z . open ( zinfo ) else : raise | Download and unzip an archive and extract the underlying fname |
25,182 | def amplify_gmfs ( imts , vs30s , gmfs ) : n = len ( vs30s ) out = [ amplify_ground_shaking ( im . period , vs30s [ i ] , gmfs [ m * n + i ] ) for m , im in enumerate ( imts ) for i in range ( n ) ] return numpy . array ( out ) | Amplify the ground shaking depending on the vs30s |
25,183 | def cholesky ( spatial_cov , cross_corr ) : M , N = spatial_cov . shape [ : 2 ] L = numpy . array ( [ numpy . linalg . cholesky ( spatial_cov [ i ] ) for i in range ( M ) ] ) LLT = [ ] for i in range ( M ) : row = [ numpy . dot ( L [ i ] , L [ j ] . T ) * cross_corr [ i , j ] for j in range ( M ) ] for j in range ( N ) : singlerow = numpy . zeros ( M * N ) for i in range ( M ) : singlerow [ i * N : ( i + 1 ) * N ] = row [ i ] [ j ] LLT . append ( singlerow ) return numpy . linalg . cholesky ( numpy . array ( LLT ) ) | Decompose the spatial covariance and cross correlation matrices . |
25,184 | def build_header ( dtype ) : header = _build_header ( dtype , ( ) ) h = [ ] for col in header : name = '~' . join ( col [ : - 2 ] ) numpytype = col [ - 2 ] shape = col [ - 1 ] coldescr = name if numpytype != 'float32' and not numpytype . startswith ( '|S' ) : coldescr += ':' + numpytype if shape : coldescr += ':' + ':' . join ( map ( str , shape ) ) h . append ( coldescr ) return h | Convert a numpy nested dtype into a list of strings suitable as header of csv file . |
25,185 | def read_composite_array ( fname , sep = ',' ) : r with open ( fname ) as f : header = next ( f ) if header . startswith ( '#' ) : attrs = dict ( parse_comment ( header [ 1 : ] ) ) header = next ( f ) else : attrs = { } transheader = htranslator . read ( header . split ( sep ) ) fields , dtype = parse_header ( transheader ) ts_pairs = [ ] for name in fields : dt = dtype . fields [ name ] [ 0 ] ts_pairs . append ( ( dt . subdtype [ 0 ] . type if dt . subdtype else dt . type , dt . shape ) ) col_ids = list ( range ( 1 , len ( ts_pairs ) + 1 ) ) num_columns = len ( col_ids ) records = [ ] col , col_id = '' , 0 for i , line in enumerate ( f , 2 ) : row = line . split ( sep ) if len ( row ) != num_columns : raise InvalidFile ( 'expected %d columns, found %d in file %s, line %d' % ( num_columns , len ( row ) , fname , i ) ) try : record = [ ] for ( ntype , shape ) , col , col_id in zip ( ts_pairs , row , col_ids ) : record . append ( _cast ( col , ntype , shape , i , fname ) ) records . append ( tuple ( record ) ) except Exception as e : raise InvalidFile ( 'Could not cast %r in file %s, line %d, column %d ' 'using %s: %s' % ( col , fname , i , col_id , ( ntype . __name__ , ) + shape , e ) ) return ArrayWrapper ( numpy . array ( records , dtype ) , attrs ) | r Convert a CSV file with header into an ArrayWrapper object . |
25,186 | def read_array ( fname , sep = ',' ) : r with open ( fname ) as f : records = [ ] for line in f : row = line . split ( sep ) record = [ list ( map ( float , col . split ( ) ) ) for col in row ] records . append ( record ) return numpy . array ( records ) | r Convert a CSV file without header into a numpy array of floats . |
25,187 | def read ( self , names ) : descrs = [ ] for name in names : mo = re . match ( self . short_regex , name ) if mo : idx = mo . lastindex suffix = self . suffix [ idx - 1 ] . replace ( r':\|' , ':|' ) descrs . append ( mo . group ( mo . lastindex ) + suffix + name [ mo . end ( ) : ] ) else : descrs . append ( name ) return descrs | Convert names into descriptions |
25,188 | def write ( self , descrs ) : names = [ ] for descr in descrs : mo = re . match ( self . long_regex , descr ) if mo : names . append ( mo . group ( mo . lastindex ) + descr [ mo . end ( ) : ] ) else : names . append ( descr ) return names | Convert descriptions into names |
25,189 | def save ( self , data , fname , header = None ) : write_csv ( fname , data , self . sep , self . fmt , header ) self . fnames . add ( getattr ( fname , 'name' , fname ) ) | Save data on fname . |
25,190 | def save_block ( self , data , dest ) : write_csv ( dest , data , self . sep , self . fmt , 'no-header' ) | Save data on dest which is file open in a mode |
25,191 | def _get_site_coeffs ( self , sites , imt ) : site_classes = self . get_nehrp_classes ( sites ) is_bedrock = self . is_bedrock ( sites ) if 'E' in site_classes : msg = ( 'Site class E and F not supported by %s' % type ( self ) . __name__ ) warnings . warn ( msg , UserWarning ) a_1 = np . nan * np . ones_like ( sites . vs30 ) a_2 = np . nan * np . ones_like ( sites . vs30 ) sigma = np . nan * np . ones_like ( sites . vs30 ) for key in self . COEFFS_NEHRP . keys ( ) : indices = ( site_classes == key ) & ~ is_bedrock a_1 [ indices ] = self . COEFFS_NEHRP [ key ] [ imt ] [ 'a1' ] a_2 [ indices ] = self . COEFFS_NEHRP [ key ] [ imt ] [ 'a2' ] sigma [ indices ] = self . COEFFS_NEHRP [ key ] [ imt ] [ 'sigma' ] a_1 [ is_bedrock ] = 0. a_2 [ is_bedrock ] = 0. sigma [ is_bedrock ] = 0. return ( a_1 , a_2 , sigma ) | Extracts correct coefficients for each site from Table 5 on p . 208 for each site . |
25,192 | def get_nehrp_classes ( self , sites ) : classes = sorted ( self . NEHRP_VS30_UPPER_BOUNDS . keys ( ) ) bounds = [ self . NEHRP_VS30_UPPER_BOUNDS [ item ] for item in classes ] bounds = np . reshape ( np . array ( bounds ) , ( - 1 , 1 ) ) vs30s = np . reshape ( sites . vs30 , ( 1 , - 1 ) ) site_classes = np . choose ( ( vs30s < bounds ) . sum ( axis = 0 ) - 1 , classes ) return site_classes . astype ( 'object' ) | Site classification threshholds from Section 4 Site correction coefficients p . 205 . Note that site classes E and F are not supported . |
25,193 | def get_valid_users ( request ) : users = [ get_user ( request ) ] if settings . LOCKDOWN and hasattr ( request , 'user' ) : if request . user . is_authenticated : groups = request . user . groups . all ( ) if groups : users = list ( User . objects . filter ( groups__in = groups ) . values_list ( 'username' , flat = True ) ) else : users = [ ] return users | Returns a list of users based on groups membership . Returns a list made of a single user when it is not member of any group . |
25,194 | def get_acl_on ( request ) : acl_on = settings . ACL_ON if settings . LOCKDOWN and hasattr ( request , 'user' ) : if request . user . is_superuser : acl_on = False return acl_on | Returns True if ACL should be honorated returns otherwise False . |
25,195 | def oq_server_context_processor ( request ) : context = { } context [ 'oq_engine_server_url' ] = ( '//' + request . META . get ( 'HTTP_HOST' , 'localhost:8800' ) ) context [ 'oq_engine_version' ] = oqversion context [ 'server_name' ] = settings . SERVER_NAME return context | A custom context processor which allows injection of additional context variables . |
25,196 | def check_webserver_running ( url = "http://localhost:8800" , max_retries = 30 ) : retry = 0 response = '' success = False while response != requests . codes . ok and retry < max_retries : try : response = requests . head ( url , allow_redirects = True ) . status_code success = True except : sleep ( 1 ) retry += 1 if not success : logging . warning ( 'Unable to connect to %s within %s retries' % ( url , max_retries ) ) return success | Returns True if a given URL is responding within a given timeout . |
25,197 | def export_csv ( ekey , dstore ) : name = ekey [ 0 ] + '.csv' try : array = dstore [ ekey [ 0 ] ] . value except AttributeError : return [ ] if len ( array . shape ) == 1 : array = array . reshape ( ( len ( array ) , 1 ) ) return [ write_csv ( dstore . export_path ( name ) , array ) ] | Default csv exporter for arrays stored in the output . hdf5 file |
25,198 | def export_input_zip ( ekey , dstore ) : dest = dstore . export_path ( 'input.zip' ) nbytes = dstore . get_attr ( 'input/zip' , 'nbytes' ) zbytes = dstore [ 'input/zip' ] . value zbytes += b'\x00' * ( nbytes - len ( zbytes ) ) open ( dest , 'wb' ) . write ( zbytes ) return [ dest ] | Export the data in the input_zip dataset as a . zip file |
25,199 | def node_to_point_geometry ( node ) : assert "pointGeometry" in node . tag for subnode in node . nodes : if "Point" in subnode . tag : lon , lat = map ( float , subnode . nodes [ 0 ] . text . split ( ) ) point = Point ( lon , lat ) elif "upperSeismoDepth" in subnode . tag : upper_depth = float_ ( subnode . text ) elif "lowerSeismoDepth" in subnode . tag : lower_depth = float_ ( subnode . text ) else : pass assert lower_depth > upper_depth return point , upper_depth , lower_depth | Reads the node and returns the point geometry upper depth and lower depth |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.