idx int64 0 63k | question stringlengths 61 4.03k | target stringlengths 6 1.23k |
|---|---|---|
25,200 | def node_to_area_geometry ( node ) : assert "areaGeometry" in node . tag for subnode in node . nodes : if "Polygon" in subnode . tag : crds = [ float ( x ) for x in subnode . nodes [ 0 ] . nodes [ 0 ] . nodes [ 0 ] . text . split ( ) ] polygon = Polygon ( [ Point ( crds [ iloc ] , crds [ iloc + 1 ] ) for iloc in range ( 0 , len ( crds ) , 2 ) ] ) elif "upperSeismoDepth" in subnode . tag : upper_depth = float_ ( subnode . text ) elif "lowerSeismoDepth" in subnode . tag : lower_depth = float_ ( subnode . text ) else : pass assert lower_depth > upper_depth return polygon , upper_depth , lower_depth | Reads an area geometry node and returns the polygon upper depth and lower depth |
25,201 | def node_to_simple_fault_geometry ( node ) : assert "simpleFaultGeometry" in node . tag for subnode in node . nodes : if "LineString" in subnode . tag : trace = linestring_node_to_line ( subnode , with_depth = False ) elif "dip" in subnode . tag : dip = float ( subnode . text ) elif "upperSeismoDepth" in subnode . tag : upper_depth = float ( subnode . text ) elif "lowerSeismoDepth" in subnode . tag : lower_depth = float ( subnode . text ) else : pass assert lower_depth > upper_depth return trace , dip , upper_depth , lower_depth | Reads a simple fault geometry node and returns an OpenQuake representation |
25,202 | def node_to_complex_fault_geometry ( node ) : assert "complexFaultGeometry" in node . tag intermediate_edges = [ ] for subnode in node . nodes : if "faultTopEdge" in subnode . tag : top_edge = linestring_node_to_line ( subnode . nodes [ 0 ] , with_depth = True ) elif "intermediateEdge" in subnode . tag : int_edge = linestring_node_to_line ( subnode . nodes [ 0 ] , with_depth = True ) intermediate_edges . append ( int_edge ) elif "faultBottomEdge" in subnode . tag : bottom_edge = linestring_node_to_line ( subnode . nodes [ 0 ] , with_depth = True ) else : pass return [ top_edge ] + intermediate_edges + [ bottom_edge ] | Reads a complex fault geometry node and returns an |
25,203 | def node_to_mfd ( node , taglist ) : if "incrementalMFD" in taglist : mfd = node_to_evenly_discretized ( node . nodes [ taglist . index ( "incrementalMFD" ) ] ) elif "truncGutenbergRichterMFD" in taglist : mfd = node_to_truncated_gr ( node . nodes [ taglist . index ( "truncGutenbergRichterMFD" ) ] ) else : mfd = None return mfd | Reads the node to return a magnitude frequency distribution |
25,204 | def node_to_nodal_planes ( node ) : if not len ( node ) : return None npd_pmf = [ ] for plane in node . nodes : if not all ( plane . attrib [ key ] for key in plane . attrib ) : return None npd = NodalPlane ( float ( plane . attrib [ "strike" ] ) , float ( plane . attrib [ "dip" ] ) , float ( plane . attrib [ "rake" ] ) ) npd_pmf . append ( ( float ( plane . attrib [ "probability" ] ) , npd ) ) return PMF ( npd_pmf ) | Parses the nodal plane distribution to a PMF |
25,205 | def node_to_hdd ( node ) : if not len ( node ) : return None hdds = [ ] for subnode in node . nodes : if not all ( [ subnode . attrib [ key ] for key in [ "depth" , "probability" ] ] ) : return None hdds . append ( ( float ( subnode . attrib [ "probability" ] ) , float ( subnode . attrib [ "depth" ] ) ) ) return PMF ( hdds ) | Parses the node to a hpyocentral depth distribution PMF |
25,206 | def _compute_mean ( self , C , g , mag , hypo_depth , rrup , vs30 , pga_rock , imt ) : if hypo_depth > 100 : hypo_depth = 100 delta = 0.00724 * 10 ** ( 0.507 * mag ) R = np . sqrt ( rrup ** 2 + delta ** 2 ) s_amp = self . _compute_soil_amplification ( C , vs30 , pga_rock , imt ) mean = ( C [ 'c1' ] + C [ 'c2' ] * mag + C [ 'c3' ] * hypo_depth + C [ 'c4' ] * R - g * np . log10 ( R ) + s_amp ) return mean | Compute mean according to equation 1 page 1706 . |
25,207 | def _compute_soil_linear_factor ( cls , pga_rock , imt ) : if imt . period >= 1 : return np . ones_like ( pga_rock ) else : sl = np . zeros_like ( pga_rock ) pga_between_100_500 = ( pga_rock > 100 ) & ( pga_rock < 500 ) pga_greater_equal_500 = pga_rock >= 500 is_SA_between_05_1 = 0.5 < imt . period < 1 is_SA_less_equal_05 = imt . period <= 0.5 if is_SA_between_05_1 : sl [ pga_between_100_500 ] = ( 1 - ( 1. / imt . period - 1 ) * ( pga_rock [ pga_between_100_500 ] - 100 ) / 400 ) sl [ pga_greater_equal_500 ] = 1 - ( 1. / imt . period - 1 ) if is_SA_less_equal_05 or imt . period == 0 : sl [ pga_between_100_500 ] = ( 1 - ( pga_rock [ pga_between_100_500 ] - 100 ) / 400 ) sl [ pga_rock <= 100 ] = 1 return sl | Compute soil linear factor as explained in paragraph Functional Form page 1706 . |
25,208 | def _get_stddevs ( self , C , mag , stddev_types , sites ) : num_sites = sites . vs30 . size sigma_intra = np . zeros ( num_sites ) tau = sigma_intra + C [ 'tau' ] if mag < 5.0 : sigma_intra += C [ 'sigmaM6' ] - C [ 'sigSlope' ] elif 5.0 <= mag < 7.0 : sigma_intra += C [ 'sigmaM6' ] + C [ 'sigSlope' ] * ( mag - 6 ) else : sigma_intra += C [ 'sigmaM6' ] + C [ 'sigSlope' ] std = [ ] for stddev_type in stddev_types : if stddev_type == const . StdDev . TOTAL : std += [ np . sqrt ( sigma_intra ** 2 + tau ** 2 ) ] elif stddev_type == const . StdDev . INTRA_EVENT : std . append ( sigma_intra ) elif stddev_type == const . StdDev . INTER_EVENT : std . append ( tau ) return std | Return standard deviation as defined on page 29 in equation 8a b c and 9 . |
25,209 | def _get_deltas ( self , sites ) : siteclass = sites . siteclass delta_C = np . zeros_like ( siteclass , dtype = np . float ) delta_C [ siteclass == b'C' ] = 1 delta_D = np . zeros_like ( siteclass , dtype = np . float ) delta_D [ siteclass == b'D' ] = 1 return delta_C , delta_D | Return delta s for equation 4 delta_C = 1 for site class C 0 otherwise delta_D = 1 for site class D 0 otherwise |
25,210 | def gen_outputs ( self , riskinput , monitor , epspath = None , hazard = None ) : self . monitor = monitor hazard_getter = riskinput . hazard_getter if hazard is None : with monitor ( 'getting hazard' ) : hazard_getter . init ( ) hazard = hazard_getter . get_hazard ( ) sids = hazard_getter . sids assert len ( sids ) == 1 with monitor ( 'computing risk' , measuremem = False ) : assets_by_taxo = get_assets_by_taxo ( riskinput . assets , epspath ) for rlzi , haz in sorted ( hazard [ sids [ 0 ] ] . items ( ) ) : out = self . get_output ( assets_by_taxo , haz , rlzi ) yield out | Group the assets per taxonomy and compute the outputs by using the underlying riskmodels . Yield one output per realization . |
25,211 | def get_available_mfds ( ) : mfds = { } for fname in os . listdir ( os . path . dirname ( __file__ ) ) : if fname . endswith ( '.py' ) : modname , _ext = os . path . splitext ( fname ) mod = importlib . import_module ( 'openquake.hmtk.faults.mfd.' + modname ) for cls in mod . __dict__ . values ( ) : if inspect . isclass ( cls ) and issubclass ( cls , BaseMFDfromSlip ) : mfds [ cls . __name__ ] = cls return dict ( ( k , mfds [ k ] ) for k in sorted ( mfds ) ) | Returns an ordered dictionary with the available GSIM classes keyed by class name |
25,212 | def check_config ( config , data ) : if 'tolerance' not in config . keys ( ) or not config [ 'tolerance' ] : config [ 'tolerance' ] = 1E-5 if not config . get ( 'maximum_iterations' , None ) : config [ 'maximum_iterations' ] = 1000 mmin_obs = np . min ( data [ 'magnitude' ] ) if config . get ( 'input_mmin' , 0 ) < mmin_obs : config [ 'input_mmin' ] = mmin_obs if fabs ( config [ 'b-value' ] ) < 1E-7 : config [ 'b-value' ] = 1E-7 return config | Checks that the config file contains all required parameters |
25,213 | def disagg_outputs ( value ) : values = value . replace ( ',' , ' ' ) . split ( ) for val in values : if val not in disagg . pmf_map : raise ValueError ( 'Invalid disagg output: %s' % val ) return values | Validate disaggregation outputs . For instance |
25,214 | def gsim ( value ) : if not value . startswith ( '[' ) : value = '[%s]' % value [ ( gsim_name , kwargs ) ] = toml . loads ( value ) . items ( ) minimum_distance = float ( kwargs . pop ( 'minimum_distance' , 0 ) ) if gsim_name == 'FromFile' : return FromFile ( ) try : gsim_class = registry [ gsim_name ] except KeyError : raise ValueError ( 'Unknown GSIM: %s' % gsim_name ) gs = gsim_class ( ** kwargs ) gs . _toml = '\n' . join ( line . strip ( ) for line in value . splitlines ( ) ) gs . minimum_distance = minimum_distance return gs | Convert a string in TOML format into a GSIM instance |
25,215 | def compose ( * validators ) : def composed_validator ( value ) : out = value for validator in reversed ( validators ) : out = validator ( out ) return out composed_validator . __name__ = 'compose(%s)' % ',' . join ( val . __name__ for val in validators ) return composed_validator | Implement composition of validators . For instance |
25,216 | def utf8 ( value ) : r try : if isinstance ( value , bytes ) : return value . decode ( 'utf-8' ) else : return value except Exception : raise ValueError ( 'Not UTF-8: %r' % value ) | r Check that the string is UTF - 8 . Returns an encode bytestring . |
25,217 | def coordinates ( value ) : if not value . strip ( ) : raise ValueError ( 'Empty list of coordinates: %r' % value ) points = [ ] pointset = set ( ) for i , line in enumerate ( value . split ( ',' ) , 1 ) : pnt = point ( line ) if pnt [ : 2 ] in pointset : raise ValueError ( "Found overlapping site #%d, %s" % ( i , line ) ) pointset . add ( pnt [ : 2 ] ) points . append ( pnt ) return points | Convert a non - empty string into a list of lon - lat coordinates . |
25,218 | def wkt_polygon ( value ) : points = [ '%s %s' % ( lon , lat ) for lon , lat , dep in coordinates ( value ) ] points . append ( points [ 0 ] ) return 'POLYGON((%s))' % ', ' . join ( points ) | Convert a string with a comma separated list of coordinates into a WKT polygon by closing the ring . |
25,219 | def check_levels ( imls , imt , min_iml = 1E-10 ) : if len ( imls ) < 1 : raise ValueError ( 'No imls for %s: %s' % ( imt , imls ) ) elif imls != sorted ( imls ) : raise ValueError ( 'The imls for %s are not sorted: %s' % ( imt , imls ) ) elif len ( distinct ( imls ) ) < len ( imls ) : raise ValueError ( "Found duplicated levels for %s: %s" % ( imt , imls ) ) elif imls [ 0 ] == 0 and imls [ 1 ] <= min_iml : raise ValueError ( "The min_iml %s=%s is larger than the second level " "for %s" % ( imt , min_iml , imls ) ) elif imls [ 0 ] == 0 and imls [ 1 ] > min_iml : imls [ 0 ] = min_iml | Raise a ValueError if the given levels are invalid . |
25,220 | def pmf ( value ) : probs = probabilities ( value ) if abs ( 1. - sum ( map ( float , value . split ( ) ) ) ) > 1e-12 : raise ValueError ( 'The probabilities %s do not sum up to 1!' % value ) return [ ( p , i ) for i , p in enumerate ( probs ) ] | Comvert a string into a Probability Mass Function . |
25,221 | def check_weights ( nodes_with_a_weight ) : weights = [ n [ 'weight' ] for n in nodes_with_a_weight ] if abs ( sum ( weights ) - 1. ) > PRECISION : raise ValueError ( 'The weights do not sum up to 1: %s' % weights ) return nodes_with_a_weight | Ensure that the sum of the values is 1 |
25,222 | def ab_values ( value ) : a , b = value . split ( ) return positivefloat ( a ) , float_ ( b ) | a and b values of the GR magniture - scaling relation . a is a positive float b is just a float . |
25,223 | def site_param ( dic ) : new = { } for name , val in dic . items ( ) : if name == 'vs30Type' : new [ 'vs30measured' ] = val == 'measured' elif name not in site . site_param_dt : raise ValueError ( 'Unrecognized parameter %s' % name ) else : new [ name ] = val return new | Convert a dictionary site_model_param - > string into a dictionary of valid casted site parameters . |
25,224 | def check ( cls , dic ) : res = { } for name , text in dic . items ( ) : try : p = getattr ( cls , name ) except AttributeError : logging . warning ( 'Ignored unknown parameter %s' , name ) else : res [ name ] = p . validator ( text ) return res | Convert a dictionary name - > string into a dictionary name - > value by converting the string . If the name does not correspond to a known parameter just ignore it and print a warning . |
25,225 | def from_ ( cls , dic ) : self = cls . __new__ ( cls ) for k , v in dic . items ( ) : setattr ( self , k , ast . literal_eval ( v ) ) return self | Build a new ParamSet from a dictionary of string - valued parameters which are assumed to be already valid . |
25,226 | def validate ( self ) : valids = [ getattr ( self , valid ) for valid in sorted ( dir ( self . __class__ ) ) if valid . startswith ( 'is_valid_' ) ] for is_valid in valids : if not is_valid ( ) : docstring = '\n' . join ( line . strip ( ) for line in is_valid . __doc__ . splitlines ( ) ) doc = docstring . format ( ** vars ( self ) ) raise ValueError ( doc ) | Apply the is_valid methods to self and possibly raise a ValueError . |
25,227 | def _get_magnitude_scaling_term ( self , C , mag ) : if mag < 6.75 : return C [ "a1_lo" ] + C [ "a2_lo" ] * mag + C [ "a3" ] * ( ( 8.5 - mag ) ** 2.0 ) else : return C [ "a1_hi" ] + C [ "a2_hi" ] * mag + C [ "a3" ] * ( ( 8.5 - mag ) ** 2.0 ) | Returns the magnitude scaling term defined in equation 3 |
25,228 | def _get_distance_scaling_term ( self , C , mag , rrup ) : if mag < 6.75 : mag_factor = - ( C [ "b1_lo" ] + C [ "b2_lo" ] * mag ) else : mag_factor = - ( C [ "b1_hi" ] + C [ "b2_hi" ] * mag ) return mag_factor * np . log ( rrup + 10.0 ) + ( C [ "gamma" ] * rrup ) | Returns the magnitude dependent distance scaling term |
25,229 | def _compute_base_term ( self , C , rup , dists ) : c1 = self . CONSTS [ 'c1' ] R = np . sqrt ( dists . rrup ** 2 + self . CONSTS [ 'c4' ] ** 2 ) base_term = ( C [ 'a1' ] + C [ 'a8' ] * ( ( 8.5 - rup . mag ) ** 2 ) + ( C [ 'a2' ] + self . CONSTS [ 'a3' ] * ( rup . mag - c1 ) ) * np . log ( R ) ) if rup . mag <= c1 : return base_term + self . CONSTS [ 'a4' ] * ( rup . mag - c1 ) else : return base_term + self . CONSTS [ 'a5' ] * ( rup . mag - c1 ) | Compute and return base model term that is the first term in equation 1 page 74 . The calculation of this term is explained in paragraph Base Model page 75 . |
25,230 | def _compute_site_response_term ( self , C , imt , sites , pga1100 ) : site_resp_term = np . zeros_like ( sites . vs30 ) vs30_star , _ = self . _compute_vs30_star_factor ( imt , sites . vs30 ) vlin , c , n = C [ 'VLIN' ] , self . CONSTS [ 'c' ] , self . CONSTS [ 'n' ] a10 , b = C [ 'a10' ] , C [ 'b' ] idx = sites . vs30 < vlin arg = vs30_star [ idx ] / vlin site_resp_term [ idx ] = ( a10 * np . log ( arg ) - b * np . log ( pga1100 [ idx ] + c ) + b * np . log ( pga1100 [ idx ] + c * ( arg ** n ) ) ) idx = sites . vs30 >= vlin site_resp_term [ idx ] = ( a10 + b * n ) * np . log ( vs30_star [ idx ] / vlin ) return site_resp_term | Compute and return site response model term that is the fifth term in equation 1 page 74 . |
25,231 | def _compute_hanging_wall_term ( self , C , dists , rup ) : if rup . dip == 90.0 : return np . zeros_like ( dists . rx ) else : idx = dists . rx > 0 Fhw = np . zeros_like ( dists . rx ) Fhw [ idx ] = 1 T1 = np . zeros_like ( dists . rx ) idx1 = ( dists . rjb < 30.0 ) & ( idx ) T1 [ idx1 ] = 1.0 - dists . rjb [ idx1 ] / 30.0 T2 = np . ones_like ( dists . rx ) idx2 = ( ( dists . rx <= rup . width * np . cos ( np . radians ( rup . dip ) ) ) & ( idx ) ) T2 [ idx2 ] = ( 0.5 + dists . rx [ idx2 ] / ( 2 * rup . width * np . cos ( np . radians ( rup . dip ) ) ) ) T3 = np . ones_like ( dists . rx ) idx3 = ( dists . rx < rup . ztor ) & ( idx ) T3 [ idx3 ] = dists . rx [ idx3 ] / rup . ztor if rup . mag <= 6.0 : T4 = 0.0 elif rup . mag > 6 and rup . mag < 7 : T4 = rup . mag - 6 else : T4 = 1.0 if rup . dip >= 30 : T5 = 1.0 - ( rup . dip - 30.0 ) / 60.0 else : T5 = 1.0 return Fhw * C [ 'a14' ] * T1 * T2 * T3 * T4 * T5 | Compute and return hanging wall model term that is the sixth term in equation 1 page 74 . The calculation of this term is explained in paragraph Hanging - Wall Model page 77 . |
25,232 | def _compute_top_of_rupture_depth_term ( self , C , rup ) : if rup . ztor >= 10.0 : return C [ 'a16' ] else : return C [ 'a16' ] * rup . ztor / 10.0 | Compute and return top of rupture depth term that is the seventh term in equation 1 page 74 . The calculation of this term is explained in paragraph Depth - to - Top of Rupture Model page 78 . |
25,233 | def _compute_large_distance_term ( self , C , dists , rup ) : if rup . mag < 5.5 : T6 = 1.0 elif rup . mag >= 5.5 and rup . mag <= 6.5 : T6 = 0.5 * ( 6.5 - rup . mag ) + 0.5 else : T6 = 0.5 large_distance_term = np . zeros_like ( dists . rrup ) idx = dists . rrup >= 100.0 large_distance_term [ idx ] = C [ 'a18' ] * ( dists . rrup [ idx ] - 100.0 ) * T6 return large_distance_term | Compute and return large distance model term that is the 8 - th term in equation 1 page 74 . The calculation of this term is explained in paragraph Large Distance Model page 78 . |
25,234 | def _compute_soil_depth_term ( self , C , imt , z1pt0 , vs30 ) : a21 = self . _compute_a21_factor ( C , imt , z1pt0 , vs30 ) a22 = self . _compute_a22_factor ( imt ) median_z1pt0 = self . _compute_median_z1pt0 ( vs30 ) soil_depth_term = a21 * np . log ( ( z1pt0 + self . CONSTS [ 'c2' ] ) / ( median_z1pt0 + self . CONSTS [ 'c2' ] ) ) idx = z1pt0 >= 200 soil_depth_term [ idx ] += a22 * np . log ( z1pt0 [ idx ] / 200 ) return soil_depth_term | Compute and return soil depth model term that is the 9 - th term in equation 1 page 74 . The calculation of this term is explained in paragraph Soil Depth Model page 79 . |
25,235 | def _get_stddevs ( self , C , C_PGA , pga1100 , rup , sites , stddev_types ) : std_intra = self . _compute_intra_event_std ( C , C_PGA , pga1100 , rup . mag , sites . vs30 , sites . vs30measured ) std_inter = self . _compute_inter_event_std ( C , C_PGA , pga1100 , rup . mag , sites . vs30 ) stddevs = [ ] for stddev_type in stddev_types : assert stddev_type in self . DEFINED_FOR_STANDARD_DEVIATION_TYPES if stddev_type == const . StdDev . TOTAL : stddevs . append ( np . sqrt ( std_intra ** 2 + std_inter ** 2 ) ) elif stddev_type == const . StdDev . INTRA_EVENT : stddevs . append ( std_intra ) elif stddev_type == const . StdDev . INTER_EVENT : stddevs . append ( std_inter ) return stddevs | Return standard deviations as described in paragraph Equations for standard deviation page 81 . |
25,236 | def _compute_inter_event_std ( self , C , C_PGA , pga1100 , mag , vs30 ) : tau_0 = self . _compute_std_0 ( C [ 's3' ] , C [ 's4' ] , mag ) tau_b_pga = self . _compute_std_0 ( C_PGA [ 's3' ] , C_PGA [ 's4' ] , mag ) delta_amp = self . _compute_partial_derivative_site_amp ( C , pga1100 , vs30 ) std_inter = np . sqrt ( tau_0 ** 2 + ( delta_amp ** 2 ) * ( tau_b_pga ** 2 ) + 2 * delta_amp * tau_0 * tau_b_pga * C [ 'rho' ] ) return std_inter | Compute inter event standard deviation equation 25 page 82 . |
25,237 | def _compute_sigma_b ( self , C , mag , vs30measured ) : sigma_0 = self . _compute_sigma_0 ( C , mag , vs30measured ) sigma_amp = self . CONSTS [ 'sigma_amp' ] return np . sqrt ( sigma_0 ** 2 - sigma_amp ** 2 ) | Equation 23 page 81 . |
25,238 | def _compute_sigma_0 ( self , C , mag , vs30measured ) : s1 = np . zeros_like ( vs30measured , dtype = float ) s2 = np . zeros_like ( vs30measured , dtype = float ) idx = vs30measured == 1 s1 [ idx ] = C [ 's1mea' ] s2 [ idx ] = C [ 's2mea' ] idx = vs30measured == 0 s1 [ idx ] = C [ 's1est' ] s2 [ idx ] = C [ 's2est' ] return self . _compute_std_0 ( s1 , s2 , mag ) | Equation 27 page 82 . |
25,239 | def _compute_std_0 ( self , c1 , c2 , mag ) : if mag < 5 : return c1 elif mag >= 5 and mag <= 7 : return c1 + ( c2 - c1 ) * ( mag - 5 ) / 2 else : return c2 | Common part of equations 27 and 28 pag 82 . |
25,240 | def _compute_a21_factor ( self , C , imt , z1pt0 , vs30 ) : e2 = self . _compute_e2_factor ( imt , vs30 ) a21 = e2 . copy ( ) vs30_star , v1 = self . _compute_vs30_star_factor ( imt , vs30 ) median_z1pt0 = self . _compute_median_z1pt0 ( vs30 ) numerator = ( ( C [ 'a10' ] + C [ 'b' ] * self . CONSTS [ 'n' ] ) * np . log ( vs30_star / np . min ( [ v1 , 1000 ] ) ) ) denominator = np . log ( ( z1pt0 + self . CONSTS [ 'c2' ] ) / ( median_z1pt0 + self . CONSTS [ 'c2' ] ) ) idx = numerator + e2 * denominator < 0 a21 [ idx ] = - numerator [ idx ] / denominator [ idx ] idx = vs30 >= 1000 a21 [ idx ] = 0.0 return a21 | Compute and return a21 factor equation 18 page 80 . |
25,241 | def _compute_vs30_star_factor ( self , imt , vs30 ) : v1 = self . _compute_v1_factor ( imt ) vs30_star = vs30 . copy ( ) vs30_star [ vs30_star >= v1 ] = v1 return vs30_star , v1 | Compute and return vs30 star factor equation 5 page 77 . |
25,242 | def _compute_v1_factor ( self , imt ) : if imt . name == "SA" : t = imt . period if t <= 0.50 : v1 = 1500.0 elif t > 0.50 and t <= 1.0 : v1 = np . exp ( 8.0 - 0.795 * np . log ( t / 0.21 ) ) elif t > 1.0 and t < 2.0 : v1 = np . exp ( 6.76 - 0.297 * np . log ( t ) ) else : v1 = 700.0 elif imt . name == "PGA" : v1 = 1500.0 else : v1 = 862.0 return v1 | Compute and return v1 factor equation 6 page 77 . |
25,243 | def _compute_e2_factor ( self , imt , vs30 ) : e2 = np . zeros_like ( vs30 ) if imt . name == "PGV" : period = 1 elif imt . name == "PGA" : period = 0 else : period = imt . period if period < 0.35 : return e2 else : idx = vs30 <= 1000 if period >= 0.35 and period <= 2.0 : e2 [ idx ] = ( - 0.25 * np . log ( vs30 [ idx ] / 1000 ) * np . log ( period / 0.35 ) ) elif period > 2.0 : e2 [ idx ] = ( - 0.25 * np . log ( vs30 [ idx ] / 1000 ) * np . log ( 2.0 / 0.35 ) ) return e2 | Compute and return e2 factor equation 19 page 80 . |
25,244 | def _compute_a22_factor ( self , imt ) : if imt . name == 'PGV' : return 0.0 period = imt . period if period < 2.0 : return 0.0 else : return 0.0625 * ( period - 2.0 ) | Compute and return the a22 factor equation 20 page 80 . |
25,245 | def _compute_mean ( self , C , A1 , A2 , A3 , A4 , A5 , A6 , mag , hypo_depth , rrup , mean , idx ) : mean [ idx ] = ( A1 + A2 * mag + C [ 'C1' ] + C [ 'C2' ] * ( A3 - mag ) ** 3 + C [ 'C3' ] * np . log ( rrup [ idx ] + A4 * np . exp ( A5 * mag ) ) + A6 * hypo_depth ) | Compute mean for subduction interface events as explained in table 2 page 67 . |
25,246 | def _compute_std ( self , C , mag , stddevs , idx ) : if mag > 8.0 : mag = 8.0 for stddev in stddevs : stddev [ idx ] += C [ 'C4' ] + C [ 'C5' ] * mag | Compute total standard deviation as explained in table 2 page 67 . |
25,247 | def _build_basemap ( self ) : if self . config [ 'min_lon' ] >= self . config [ 'max_lon' ] : raise ValueError ( 'Upper limit of long is smaller than lower limit' ) if self . config [ 'min_lon' ] >= self . config [ 'max_lon' ] : raise ValueError ( 'Upper limit of long is smaller than lower limit' ) lowcrnrlat = self . config [ 'min_lat' ] lowcrnrlon = self . config [ 'min_lon' ] uppcrnrlat = self . config [ 'max_lat' ] uppcrnrlon = self . config [ 'max_lon' ] if 'resolution' not in self . config . keys ( ) : self . config [ 'resolution' ] = 'l' lat0 = lowcrnrlat + ( ( uppcrnrlat - lowcrnrlat ) / 2 ) lon0 = lowcrnrlon + ( ( uppcrnrlon - lowcrnrlon ) / 2 ) if ( uppcrnrlat - lowcrnrlat ) >= ( uppcrnrlon - lowcrnrlon ) : fig_aspect = PORTRAIT_ASPECT else : fig_aspect = LANDSCAPE_ASPECT if self . ax is None : self . fig , self . ax = plt . subplots ( figsize = fig_aspect , facecolor = 'w' , edgecolor = 'k' ) else : self . fig = self . ax . get_figure ( ) if self . title : self . ax . set_title ( self . title , fontsize = 16 ) parallels = np . arange ( - 90. , 90. , self . lat_lon_spacing ) meridians = np . arange ( 0. , 360. , self . lat_lon_spacing ) from mpl_toolkits . basemap import Basemap self . m = Basemap ( llcrnrlon = lowcrnrlon , llcrnrlat = lowcrnrlat , urcrnrlon = uppcrnrlon , urcrnrlat = uppcrnrlat , projection = 'stere' , resolution = self . config [ 'resolution' ] , area_thresh = 1000.0 , lat_0 = lat0 , lon_0 = lon0 , ax = self . ax ) self . m . drawcountries ( ) self . m . drawmapboundary ( ) self . m . drawcoastlines ( ) self . m . drawstates ( ) self . m . drawparallels ( parallels , labels = [ 1 , 0 , 0 , 0 ] , fontsize = 12 ) self . m . drawmeridians ( meridians , labels = [ 0 , 0 , 0 , 1 ] , fontsize = 12 ) self . m . fillcontinents ( color = 'wheat' ) | Creates the map according to the input configuration |
25,248 | def savemap ( self , filename , filetype = 'png' , papertype = "a4" ) : self . fig . savefig ( filename , dpi = self . dpi , format = filetype , papertype = papertype ) | Save the figure |
25,249 | def add_source_model ( self , model , area_border = 'k-' , border_width = 1.0 , point_marker = 'ks' , point_size = 2.0 , overlay = False , min_depth = 0. , max_depth = None , alpha = 1.0 ) : for source in model . sources : if isinstance ( source , mtkAreaSource ) : self . _plot_area_source ( source , area_border , border_width ) elif isinstance ( source , mtkPointSource ) : self . _plot_point_source ( source , point_marker , point_size ) elif isinstance ( source , mtkComplexFaultSource ) : self . _plot_complex_fault ( source , area_border , border_width , min_depth , max_depth , alpha ) elif isinstance ( source , mtkSimpleFaultSource ) : self . _plot_simple_fault ( source , area_border , border_width ) else : pass if not overlay : plt . show ( ) | Adds a source model to the map |
25,250 | def add_colour_scaled_points ( self , longitude , latitude , data , shape = 's' , alpha = 1.0 , size = 20 , norm = None , overlay = False ) : if not norm : norm = Normalize ( vmin = np . min ( data ) , vmax = np . max ( data ) ) x , y , = self . m ( longitude , latitude ) mappable = self . m . scatter ( x , y , marker = shape , s = size , c = data , norm = norm , alpha = alpha , linewidths = 0.0 , zorder = 4 ) self . m . colorbar ( mappable = mappable , fig = self . fig , ax = self . ax ) if not overlay : plt . show ( ) | Overlays a set of points on a map with a fixed size but colour scaled according to the data |
25,251 | def add_size_scaled_points ( self , longitude , latitude , data , shape = 'o' , logplot = False , alpha = 1.0 , colour = 'b' , smin = 2.0 , sscale = 2.0 , overlay = False ) : if logplot : data = np . log10 ( data . copy ( ) ) x , y , = self . m ( longitude , latitude ) self . m . scatter ( x , y , marker = shape , s = ( smin + data ** sscale ) , c = colour , alpha = alpha , zorder = 2 ) if not overlay : plt . show ( ) | Plots a set of points with size scaled according to the data |
25,252 | def add_catalogue_cluster ( self , catalogue , vcl , flagvector , cluster_id = None , overlay = True ) : self . add_size_scaled_points ( catalogue . data [ 'longitude' ] , catalogue . data [ 'latitude' ] , catalogue . data [ 'magnitude' ] , shape = "o" , alpha = 0.8 , colour = ( 0.5 , 0.5 , 0.5 ) , smin = 1.0 , sscale = 1.5 , overlay = True ) if cluster_id is None : idx = flagvector == 0 self . add_size_scaled_points ( catalogue . data [ 'longitude' ] [ idx ] , catalogue . data [ 'latitude' ] [ idx ] , catalogue . data [ 'magnitude' ] [ idx ] , shape = "o" , colour = "r" , smin = 1.0 , sscale = 1.5 , overlay = overlay ) return if not isinstance ( cluster_id , collections . Iterable ) : cluster_id = [ cluster_id ] for iloc , clid in enumerate ( cluster_id ) : if iloc == ( len ( cluster_id ) - 1 ) : temp_overlay = overlay else : temp_overlay = True idx = vcl == clid self . add_size_scaled_points ( catalogue . data [ "longitude" ] [ idx ] , catalogue . data [ "latitude" ] [ idx ] , catalogue . data [ "magnitude" ] [ idx ] , shape = "o" , colour = DISSIMILAR_COLOURLIST [ ( iloc + 1 ) % NCOLS ] , smin = 1.0 , sscale = 1.5 , overlay = temp_overlay ) | Creates a plot of a catalogue showing where particular clusters exist |
25,253 | def _get_stddevs ( self , C , mag , stddev_types , num_sites ) : std = C [ 'c16' ] + np . zeros ( num_sites ) if mag < 7.4 : std -= 0.07 * mag else : std -= 0.518 stddevs = [ std for _ in stddev_types ] return stddevs | Return standard deviation as defined in eq . 11 page 319 . |
25,254 | def distinct ( keys ) : known = set ( ) outlist = [ ] for key in keys : if key not in known : outlist . append ( key ) known . add ( key ) return outlist | Return the distinct keys in order . |
25,255 | def split_in_blocks ( sequence , hint , weight = lambda item : 1 , key = nokey ) : if isinstance ( sequence , int ) : return split_in_slices ( sequence , hint ) elif hint in ( 0 , 1 ) and key is nokey : return [ sequence ] elif hint in ( 0 , 1 ) : blocks = [ ] for k , group in groupby ( sequence , key ) . items ( ) : blocks . append ( group ) return blocks items = sorted ( sequence , key = lambda item : ( key ( item ) , weight ( item ) ) ) assert hint > 0 , hint assert len ( items ) > 0 , len ( items ) total_weight = float ( sum ( weight ( item ) for item in items ) ) return block_splitter ( items , math . ceil ( total_weight / hint ) , weight , key ) | Split the sequence in a number of WeightedSequences close to hint . |
25,256 | def gettemp ( content = None , dir = None , prefix = "tmp" , suffix = "tmp" ) : if dir is not None : if not os . path . exists ( dir ) : os . makedirs ( dir ) fh , path = tempfile . mkstemp ( dir = dir , prefix = prefix , suffix = suffix ) _tmp_paths . append ( path ) if content : fh = os . fdopen ( fh , "wb" ) if hasattr ( content , 'encode' ) : content = content . encode ( 'utf8' ) fh . write ( content ) fh . close ( ) return path | Create temporary file with the given content . |
25,257 | def removetmp ( ) : for path in _tmp_paths : if os . path . exists ( path ) : try : os . remove ( path ) except PermissionError : pass | Remove the temporary files created by gettemp |
25,258 | def run_in_process ( code , * args ) : if args : code %= args try : out = subprocess . check_output ( [ sys . executable , '-c' , code ] ) except subprocess . CalledProcessError as exc : print ( exc . cmd [ - 1 ] , file = sys . stderr ) raise if out : return eval ( out , { } , { } ) | Run in an external process the given Python code and return the output as a Python object . If there are arguments then code is taken as a template and traditional string interpolation is performed . |
25,259 | def import_all ( module_or_package ) : already_imported = set ( sys . modules ) mod_or_pkg = importlib . import_module ( module_or_package ) if not hasattr ( mod_or_pkg , '__path__' ) : return set ( sys . modules ) - already_imported [ pkg_path ] = mod_or_pkg . __path__ n = len ( pkg_path ) for cwd , dirs , files in os . walk ( pkg_path ) : if all ( os . path . basename ( f ) != '__init__.py' for f in files ) : continue for f in files : if f . endswith ( '.py' ) : modname = ( module_or_package + cwd [ n : ] . replace ( os . sep , '.' ) + '.' + os . path . basename ( f [ : - 3 ] ) ) importlib . import_module ( modname ) return set ( sys . modules ) - already_imported | If module_or_package is a module just import it ; if it is a package recursively imports all the modules it contains . Returns the names of the modules that were imported as a set . The set can be empty if the modules were already in sys . modules . |
25,260 | def get_array ( array , ** kw ) : for name , value in kw . items ( ) : array = array [ array [ name ] == value ] return array | Extract a subarray by filtering on the given keyword arguments |
25,261 | def not_equal ( array_or_none1 , array_or_none2 ) : if array_or_none1 is None and array_or_none2 is None : return False elif array_or_none1 is None and array_or_none2 is not None : return True elif array_or_none1 is not None and array_or_none2 is None : return True if array_or_none1 . shape != array_or_none2 . shape : return True return ( array_or_none1 != array_or_none2 ) . any ( ) | Compare two arrays that can also be None or have diffent shapes and returns a boolean . |
25,262 | def humansize ( nbytes , suffixes = ( 'B' , 'KB' , 'MB' , 'GB' , 'TB' , 'PB' ) ) : if nbytes == 0 : return '0 B' i = 0 while nbytes >= 1024 and i < len ( suffixes ) - 1 : nbytes /= 1024. i += 1 f = ( '%.2f' % nbytes ) . rstrip ( '0' ) . rstrip ( '.' ) return '%s %s' % ( f , suffixes [ i ] ) | Return file size in a human - friendly format |
25,263 | def deprecated ( func , msg = '' , * args , ** kw ) : msg = '%s.%s has been deprecated. %s' % ( func . __module__ , func . __name__ , msg ) if not hasattr ( func , 'called' ) : warnings . warn ( msg , DeprecationWarning , stacklevel = 2 ) func . called = 0 func . called += 1 return func ( * args , ** kw ) | A family of decorators to mark deprecated functions . |
25,264 | def random_histogram ( counts , nbins , seed ) : numpy . random . seed ( seed ) return numpy . histogram ( numpy . random . random ( counts ) , nbins , ( 0 , 1 ) ) [ 0 ] | Distribute a total number of counts on a set of bins homogenously . |
25,265 | def safeprint ( * args , ** kwargs ) : new_args = [ ] str_encoding = getattr ( sys . stdout , 'encoding' , None ) or 'ascii' for s in args : new_args . append ( s . encode ( 'utf-8' ) . decode ( str_encoding , 'ignore' ) ) return print ( * new_args , ** kwargs ) | Convert and print characters using the proper encoding |
25,266 | def zipfiles ( fnames , archive , mode = 'w' , log = lambda msg : None , cleanup = False ) : prefix = len ( os . path . commonprefix ( [ os . path . dirname ( f ) for f in fnames ] ) ) with zipfile . ZipFile ( archive , mode , zipfile . ZIP_DEFLATED , allowZip64 = True ) as z : for f in fnames : log ( 'Archiving %s' % f ) z . write ( f , f [ prefix : ] ) if cleanup : os . remove ( f ) log ( 'Generated %s' % archive ) return archive | Build a zip archive from the given file names . |
25,267 | def println ( msg ) : sys . stdout . write ( msg ) sys . stdout . flush ( ) sys . stdout . write ( '\x08' * len ( msg ) ) sys . stdout . flush ( ) | Convenience function to print messages on a single line in the terminal |
25,268 | def warn ( msg , * args ) : if not args : sys . stderr . write ( 'WARNING: ' + msg ) else : sys . stderr . write ( 'WARNING: ' + msg % args ) | Print a warning on stderr |
25,269 | def insert ( self , i , item_weight ) : item , weight = item_weight self . _seq . insert ( i , item ) self . weight += weight | Insert an item with the given weight in the sequence |
25,270 | def add ( self , * keys ) : def decorator ( func ) : for key in keys : self [ key ] = func return func return decorator | Return a decorator registering a new implementation for the CallableDict for the given keys . |
25,271 | def _get_magnitude_scaling_term ( self , C , mag ) : dmag = mag - self . CONSTS [ "Mh" ] if mag < self . CONSTS [ "Mh" ] : return C [ "e1" ] + ( C [ "b1" ] * dmag ) + ( C [ "b2" ] * ( dmag ** 2.0 ) ) else : return C [ "e1" ] + ( C [ "b3" ] * dmag ) | Returns the magnitude scaling term of the GMPE described in equation 3 |
25,272 | def _get_distance_scaling_term ( self , C , rval , mag ) : r_adj = np . sqrt ( rval ** 2.0 + C [ "h" ] ** 2.0 ) return ( ( C [ "c1" ] + C [ "c2" ] * ( mag - self . CONSTS [ "Mref" ] ) ) * np . log10 ( r_adj / self . CONSTS [ "Rref" ] ) - ( C [ "c3" ] * ( r_adj - self . CONSTS [ "Rref" ] ) ) ) | Returns the distance scaling term of the GMPE described in equation 2 |
25,273 | def _get_site_amplification_term ( self , C , vs30 ) : return C [ "gamma" ] * np . log10 ( vs30 / self . CONSTS [ "Vref" ] ) | Returns the site amplification term for the case in which Vs30 is used directly |
25,274 | def _get_site_amplification_term ( self , C , vs30 ) : f_s = np . zeros_like ( vs30 ) idx = np . logical_and ( vs30 < 800.0 , vs30 >= 360.0 ) f_s [ idx ] = C [ "eB" ] idx = np . logical_and ( vs30 < 360.0 , vs30 >= 180.0 ) f_s [ idx ] = C [ "eC" ] idx = vs30 < 180.0 f_s [ idx ] = C [ "eD" ] return f_s | Returns the site amplification given Eurocode 8 site classification |
25,275 | def _get_mean ( self , C , rup , dists , sites ) : return ( self . _get_magnitude_scaling_term ( C , rup . mag ) + self . _get_distance_scaling_term ( C , dists . rjb , rup . mag ) + self . _get_site_amplification_term ( C , sites . vs30 ) ) | Returns the mean value of ground motion - noting that in this case the style - of - faulting term is neglected |
25,276 | def build_loss_tables ( dstore ) : oq = dstore [ 'oqparam' ] L = len ( oq . loss_dt ( ) . names ) R = dstore [ 'csm_info' ] . get_num_rlzs ( ) serials = dstore [ 'ruptures' ] [ 'serial' ] idx_by_ser = dict ( zip ( serials , range ( len ( serials ) ) ) ) tbl = numpy . zeros ( ( len ( serials ) , L ) , F32 ) lbr = numpy . zeros ( ( R , L ) , F32 ) for rec in dstore [ 'losses_by_event' ] . value : idx = idx_by_ser [ rec [ 'eid' ] // TWO32 ] tbl [ idx ] += rec [ 'loss' ] lbr [ rec [ 'rlzi' ] ] += rec [ 'loss' ] return tbl , lbr | Compute the total losses by rupture and losses by rlzi . |
25,277 | def html ( header_rows ) : name = 'table%d' % next ( tablecounter ) return HtmlTable ( [ map ( str , row ) for row in header_rows ] , name ) . render ( ) | Convert a list of tuples describing a table into a HTML string |
25,278 | def make_tabs ( tag_ids , tag_status , tag_contents ) : templ = lis = [ ] contents = [ ] for i , ( tag_id , status , tag_content ) in enumerate ( zip ( tag_ids , tag_status , tag_contents ) , 1 ) : mark = '.' if status == 'complete' else '!' lis . append ( '<li><a href="#tabs-%d">%s%s</a></li>' % ( i , tag_id , mark ) ) contents . append ( '<div id="tabs-%d">%s</div>' % ( i , tag_content ) ) return templ % ( '\n' . join ( lis ) , '\n' . join ( contents ) ) | Return a HTML string containing all the tabs we want to display |
25,279 | def make_report ( isodate = 'today' ) : if isodate == 'today' : isodate = date . today ( ) else : isodate = date ( * time . strptime ( isodate , '%Y-%m-%d' ) [ : 3 ] ) isodate1 = isodate + timedelta ( 1 ) tag_ids = [ ] tag_status = [ ] tag_contents = [ ] jobs = dbcmd ( 'fetch' , ALL_JOBS , isodate . isoformat ( ) , isodate1 . isoformat ( ) ) page = '<h2>%d job(s) finished before midnight of %s</h2>' % ( len ( jobs ) , isodate ) for job_id , user , status , ds_calc in jobs : tag_ids . append ( job_id ) tag_status . append ( status ) [ stats ] = dbcmd ( 'fetch' , JOB_STATS , job_id ) ( job_id , user , start_time , stop_time , status , duration ) = stats try : ds = read ( job_id , datadir = os . path . dirname ( ds_calc ) ) txt = view_fullreport ( 'fullreport' , ds ) report = html_parts ( txt ) except Exception as exc : report = dict ( html_title = 'Could not generate report: %s' % cgi . escape ( str ( exc ) , quote = True ) , fragment = '' ) page = report [ 'html_title' ] page += html ( [ stats . _fields , stats ] ) page += report [ 'fragment' ] tag_contents . append ( page ) page = make_tabs ( tag_ids , tag_status , tag_contents ) + ( 'Report last updated: %s' % datetime . now ( ) ) fname = 'jobs-%s.html' % isodate with open ( fname , 'w' ) as f : f . write ( PAGE_TEMPLATE % page ) return fname | Build a HTML report with the computations performed at the given isodate . Return the name of the report which is saved in the current directory . |
25,280 | def scenario_risk ( riskinputs , riskmodel , param , monitor ) : E = param [ 'E' ] L = len ( riskmodel . loss_types ) result = dict ( agg = numpy . zeros ( ( E , L ) , F32 ) , avg = [ ] , all_losses = AccumDict ( accum = { } ) ) for ri in riskinputs : for out in riskmodel . gen_outputs ( ri , monitor , param [ 'epspath' ] ) : r = out . rlzi weight = param [ 'weights' ] [ r ] slc = param [ 'event_slice' ] ( r ) for l , loss_type in enumerate ( riskmodel . loss_types ) : losses = out [ loss_type ] if numpy . product ( losses . shape ) == 0 : continue stats = numpy . zeros ( len ( ri . assets ) , stat_dt ) for a , asset in enumerate ( ri . assets ) : stats [ 'mean' ] [ a ] = losses [ a ] . mean ( ) stats [ 'stddev' ] [ a ] = losses [ a ] . std ( ddof = 1 ) result [ 'avg' ] . append ( ( l , r , asset [ 'ordinal' ] , stats [ a ] ) ) agglosses = losses . sum ( axis = 0 ) result [ 'agg' ] [ slc , l ] += agglosses * weight if param [ 'asset_loss_table' ] : aids = ri . assets [ 'ordinal' ] result [ 'all_losses' ] [ l , r ] += AccumDict ( zip ( aids , losses ) ) return result | Core function for a scenario computation . |
25,281 | def _check_depth_limits ( input_dict ) : if ( 'upper_depth' in input_dict . keys ( ) ) and input_dict [ 'upper_depth' ] : if input_dict [ 'upper_depth' ] < 0. : raise ValueError ( 'Upper seismogenic depth must be positive' ) else : upper_depth = input_dict [ 'upper_depth' ] else : upper_depth = 0.0 if ( 'lower_depth' in input_dict . keys ( ) ) and input_dict [ 'lower_depth' ] : if input_dict [ 'lower_depth' ] < upper_depth : raise ValueError ( 'Lower depth must take a greater value than' ' upper depth!' ) else : lower_depth = input_dict [ 'lower_depth' ] else : lower_depth = np . inf return upper_depth , lower_depth | Returns the default upper and lower depth values if not in dictionary |
25,282 | def _get_decimal_from_datetime ( time ) : temp_seconds = np . float ( time . second ) + ( np . float ( time . microsecond ) / 1.0E6 ) return decimal_time ( np . array ( [ time . year ] , dtype = int ) , np . array ( [ time . month ] , dtype = int ) , np . array ( [ time . day ] , dtype = int ) , np . array ( [ time . hour ] , dtype = int ) , np . array ( [ time . minute ] , dtype = int ) , np . array ( [ temp_seconds ] , dtype = int ) ) | As the decimal time function requires inputs in the form of numpy arrays need to convert each value in the datetime object to a single numpy array |
25,283 | def select_catalogue ( self , valid_id ) : if not np . any ( valid_id ) : output = Catalogue ( ) output . processes = self . catalogue . processes elif np . all ( valid_id ) : if self . copycat : output = deepcopy ( self . catalogue ) else : output = self . catalogue else : if self . copycat : output = deepcopy ( self . catalogue ) else : output = self . catalogue output . purge_catalogue ( valid_id ) return output | Method to post - process the catalogue based on the selection options |
25,284 | def within_polygon ( self , polygon , distance = None , ** kwargs ) : if distance : zone_polygon = polygon . dilate ( distance ) else : zone_polygon = polygon upper_depth , lower_depth = _check_depth_limits ( kwargs ) valid_depth = np . logical_and ( self . catalogue . data [ 'depth' ] >= upper_depth , self . catalogue . data [ 'depth' ] < lower_depth ) catalogue_mesh = Mesh ( self . catalogue . data [ 'longitude' ] , self . catalogue . data [ 'latitude' ] , self . catalogue . data [ 'depth' ] ) valid_id = np . logical_and ( valid_depth , zone_polygon . intersects ( catalogue_mesh ) ) return self . select_catalogue ( valid_id ) | Select earthquakes within polygon |
25,285 | def circular_distance_from_point ( self , point , distance , ** kwargs ) : if kwargs [ 'distance_type' ] is 'epicentral' : locations = Mesh ( self . catalogue . data [ 'longitude' ] , self . catalogue . data [ 'latitude' ] , np . zeros ( len ( self . catalogue . data [ 'longitude' ] ) , dtype = float ) ) point = Point ( point . longitude , point . latitude , 0.0 ) else : locations = self . catalogue . hypocentres_as_mesh ( ) is_close = point . closer_than ( locations , distance ) return self . select_catalogue ( is_close ) | Select earthquakes within a distance from a Point |
25,286 | def cartesian_square_centred_on_point ( self , point , distance , ** kwargs ) : point_surface = Point ( point . longitude , point . latitude , 0. ) north_point = point_surface . point_at ( distance , 0. , 0. ) east_point = point_surface . point_at ( distance , 0. , 90. ) south_point = point_surface . point_at ( distance , 0. , 180. ) west_point = point_surface . point_at ( distance , 0. , 270. ) is_long = np . logical_and ( self . catalogue . data [ 'longitude' ] >= west_point . longitude , self . catalogue . data [ 'longitude' ] < east_point . longitude ) is_surface = np . logical_and ( is_long , self . catalogue . data [ 'latitude' ] >= south_point . latitude , self . catalogue . data [ 'latitude' ] < north_point . latitude ) upper_depth , lower_depth = _check_depth_limits ( kwargs ) is_valid = np . logical_and ( is_surface , self . catalogue . data [ 'depth' ] >= upper_depth , self . catalogue . data [ 'depth' ] < lower_depth ) return self . select_catalogue ( is_valid ) | Select earthquakes from within a square centered on a point |
25,287 | def within_joyner_boore_distance ( self , surface , distance , ** kwargs ) : upper_depth , lower_depth = _check_depth_limits ( kwargs ) rjb = surface . get_joyner_boore_distance ( self . catalogue . hypocentres_as_mesh ( ) ) is_valid = np . logical_and ( rjb <= distance , np . logical_and ( self . catalogue . data [ 'depth' ] >= upper_depth , self . catalogue . data [ 'depth' ] < lower_depth ) ) return self . select_catalogue ( is_valid ) | Select events within a Joyner - Boore distance of a fault |
25,288 | def within_rupture_distance ( self , surface , distance , ** kwargs ) : upper_depth , lower_depth = _check_depth_limits ( kwargs ) rrupt = surface . get_min_distance ( self . catalogue . hypocentres_as_mesh ( ) ) is_valid = np . logical_and ( rrupt <= distance , np . logical_and ( self . catalogue . data [ 'depth' ] >= upper_depth , self . catalogue . data [ 'depth' ] < lower_depth ) ) return self . select_catalogue ( is_valid ) | Select events within a rupture distance from a fault surface |
25,289 | def within_time_period ( self , start_time = None , end_time = None ) : time_value = self . catalogue . get_decimal_time ( ) if not start_time : if not end_time : return self . catalogue else : start_time = np . min ( self . catalogue . data [ 'year' ] ) else : start_time = _get_decimal_from_datetime ( start_time ) if not end_time : end_time = _get_decimal_from_datetime ( datetime . now ( ) ) else : end_time = _get_decimal_from_datetime ( end_time ) time_value = self . catalogue . get_decimal_time ( ) is_valid = np . logical_and ( time_value >= start_time , time_value < end_time ) return self . select_catalogue ( is_valid ) | Select earthquakes occurring within a given time period |
25,290 | def within_depth_range ( self , lower_depth = None , upper_depth = None ) : if not lower_depth : if not upper_depth : return self . catalogue else : lower_depth = np . inf if not upper_depth : upper_depth = 0.0 is_valid = np . logical_and ( self . catalogue . data [ 'depth' ] >= upper_depth , self . catalogue . data [ 'depth' ] < lower_depth ) return self . select_catalogue ( is_valid ) | Selects events within a specified depth range |
25,291 | def create_cluster_set ( self , vcl ) : num_clust = np . max ( vcl ) cluster_set = [ ] for clid in range ( 0 , num_clust + 1 ) : idx = np . where ( vcl == clid ) [ 0 ] cluster_cat = deepcopy ( self . catalogue ) cluster_cat . select_catalogue_events ( idx ) cluster_set . append ( ( clid , cluster_cat ) ) return dict ( cluster_set ) | For a given catalogue and list of cluster IDs this function splits the catalogue into a dictionary containing an individual catalogue of events within each cluster |
25,292 | def within_bounding_box ( self , limits ) : is_valid = np . logical_and ( self . catalogue . data [ 'longitude' ] >= limits [ 0 ] , np . logical_and ( self . catalogue . data [ 'longitude' ] <= limits [ 2 ] , np . logical_and ( self . catalogue . data [ 'latitude' ] >= limits [ 1 ] , self . catalogue . data [ 'latitude' ] <= limits [ 3 ] ) ) ) return self . select_catalogue ( is_valid ) | Selects the earthquakes within a bounding box . |
25,293 | def get_calc_ids ( datadir = None ) : datadir = datadir or get_datadir ( ) if not os . path . exists ( datadir ) : return [ ] calc_ids = set ( ) for f in os . listdir ( datadir ) : mo = re . match ( CALC_REGEX , f ) if mo : calc_ids . add ( int ( mo . group ( 2 ) ) ) return sorted ( calc_ids ) | Extract the available calculation IDs from the datadir in order . |
25,294 | def get_last_calc_id ( datadir = None ) : datadir = datadir or get_datadir ( ) calcs = get_calc_ids ( datadir ) if not calcs : return 0 return calcs [ - 1 ] | Extract the latest calculation ID from the given directory . If none is found return 0 . |
25,295 | def open ( self , mode ) : if self . hdf5 == ( ) : kw = dict ( mode = mode , libver = 'latest' ) if mode == 'r' : kw [ 'swmr' ] = True try : self . hdf5 = hdf5 . File ( self . filename , ** kw ) except OSError as exc : raise OSError ( '%s in %s' % ( exc , self . filename ) ) | Open the underlying . hdf5 file and the parent if any |
25,296 | def create_dset ( self , key , dtype , shape = ( None , ) , compression = None , fillvalue = 0 , attrs = None ) : return hdf5 . create ( self . hdf5 , key , dtype , shape , compression , fillvalue , attrs ) | Create a one - dimensional HDF5 dataset . |
25,297 | def extend ( self , key , array , ** attrs ) : try : dset = self . hdf5 [ key ] except KeyError : dset = hdf5 . create ( self . hdf5 , key , array . dtype , shape = ( None , ) + array . shape [ 1 : ] ) hdf5 . extend ( dset , array ) for k , v in attrs . items ( ) : dset . attrs [ k ] = v return dset | Extend the dataset associated to the given key ; create it if needed |
25,298 | def save ( self , key , kw ) : if key not in self : obj = hdf5 . LiteralAttrs ( ) else : obj = self [ key ] vars ( obj ) . update ( kw ) self [ key ] = obj self . flush ( ) | Update the object associated to key with the kw dictionary ; works for LiteralAttrs objects and automatically flushes . |
25,299 | def export_path ( self , relname , export_dir = None ) : name , ext = relname . replace ( '/' , '-' ) . rsplit ( '.' , 1 ) newname = '%s_%s.%s' % ( name , self . calc_id , ext ) if export_dir is None : export_dir = self . export_dir return os . path . join ( export_dir , newname ) | Return the path of the exported file by adding the export_dir in front the calculation ID at the end . |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.