idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
44,400
def trt_pmf(matrices):
    """
    Fold a full disaggregation matrix to a tectonic region type PMF.

    :param matrices: array of shape (ntrts, nmags, ndists, nlons, nlats, neps)
    :returns: array of ntrts probabilities, one per tectonic region type
    """
    ntrts = matrices.shape[0]
    pmf = numpy.zeros(ntrts)
    for t in range(ntrts):
        # P(at least one cell contributes) = 1 - prod(1 - p_cell) over all
        # cells; vectorized instead of the original 5-level nested Python
        # comprehension (same result, no per-cell Python overhead)
        pmf[t] = 1. - numpy.prod(1. - matrices[t])
    return pmf
Fold full disaggregation matrix to tectonic region type PMF .
44,401
def db(cmd, args=()):
    """
    Run a database command
    """
    if cmd not in commands:
        # build a help listing of every known command with its arguments;
        # NB: the loop variable is named cmdargs to avoid shadowing `args`
        helplines = []
        for name, cmdargs in sorted(commands.items()):
            suffix = repr(' '.join(cmdargs)) if cmdargs else ''
            helplines.append('%s %s' % (name, suffix))
        print('Invalid command "%s": choose one from\n%s' %
              (cmd, '\n'.join(helplines)))
    elif len(args) != len(commands[cmd]):
        print('Wrong number of arguments, expected %s, got %s' %
              (commands[cmd], args))
    else:
        dbserver.ensure_on()
        res = logs.dbcmd(cmd, *convert(args))
        if hasattr(res, '_fields') and res.__class__.__name__ != 'Row':
            print(rst_table(res))
        else:
            print(res)
Run a database command
44,402
def mean_curve(values, weights=None):
    """
    Compute the mean by using numpy.average on the first axis.
    """
    values = numpy.asarray(values)
    if weights is None:
        # default to equal weights summing to 1
        n = len(values)
        weights = [1. / n] * n
    return numpy.average(values, axis=0, weights=weights)
Compute the mean by using numpy . average on the first axis .
44,403
def quantile_curve(quantile, curves, weights=None):
    """
    Compute the weighted quantile aggregate of a set of curves.
    """
    curves = numpy.asarray(curves)
    num_curves = len(curves)
    if weights is None:
        weights = numpy.ones(num_curves) / num_curves
    else:
        weights = numpy.array(weights)
        assert len(weights) == num_curves, (len(weights), num_curves)
    result = numpy.zeros(curves.shape[1:])
    for idx, _ in numpy.ndenumerate(result):
        # gather the values at this index across all curves, sort them,
        # and interpolate the quantile on the cumulative weights
        values = numpy.array([curve[idx] for curve in curves])
        order = numpy.argsort(values)
        cum_weights = numpy.cumsum(weights[order])
        result[idx] = numpy.interp(quantile, cum_weights, values[order])
    return result
Compute the weighted quantile aggregate of a set of curves .
44,404
def _compute_mean(self, C, mag, rjb):
    """
    Compute mean value according to equation 3 page 46.
    """
    term1 = self._compute_term1(C, mag)
    term2 = self._compute_term2(C, mag, rjb)
    return C['c1'] + term1 + term2
Compute mean value according to equation 3 page 46 .
44,405
def _compute_stddevs(self, C, mag, rjb, imt, stddev_types):
    """
    Compute total standard deviation, equations 5 and 6 page 48.
    """
    # aleatory uncertainty: magnitude- and distance-dependent pieces,
    # combined in quadrature
    sigma_ale_m = np.interp(mag, [5.0, 5.5, 8.0],
                            [C['m50'], C['m55'], C['m80']])
    sigma_ale_rjb = np.interp(rjb, [5.0, 20.0], [C['r5'], C['r20']])
    sigma_ale = np.sqrt(sigma_ale_m ** 2 + sigma_ale_rjb ** 2)
    # epistemic uncertainty: coefficients depend on the spectral period
    if imt.period < 1:
        sigma_epi = 0.36 + 0.07 * (mag - 6)
    else:
        sigma_epi = 0.34 + 0.06 * (mag - 6)
    sigma_total = np.sqrt(sigma_ale ** 2 + sigma_epi ** 2)
    # one (identical) total sigma per requested stddev type
    return [sigma_total for _ in stddev_types]
Compute total standard deviation equations 5 and 6 page 48 .
44,406
def BA08_AB06(self, vs30, imt, pgar):
    """
    Computes amplification factor similarly to what is done in the 2015
    version of the Canada building code. An initial version of this code
    was kindly provided by Michal Kolaj - Geological Survey of Canada.

    :param vs30: scalar or array of vs30 values
    :param imt: the intensity measure type
    :param pgar: scalar or array of reference PGA values
    :returns: array of amplification factors
    """
    # normalize scalar inputs to 1-element arrays *before* building fa:
    # the original built fa with np.ones_like(scalar), a 0-d array on
    # which fa[idx] = ... fails
    if np.isscalar(vs30):
        vs30 = np.array([vs30])
    if np.isscalar(pgar):
        pgar = np.array([pgar])
    fa = np.ones_like(vs30)
    vs = copy.copy(vs30)
    vs[vs >= 2000] = 1999.  # cap vs30 at the upper table limit
    # NOTE(review): vs30 == 760 falls in neither branch and keeps fa == 1
    # — confirm that is intended
    ba08 = BooreAtkinson2008()
    for idx in (np.where(vs30 > 760), np.where(vs < 760.)):
        if np.size(idx) > 0:
            C = self.COEFFS_BA08[imt]
            nl = ba08._get_site_amplification_non_linear(
                vs[idx], pgar[idx], C)
            lin = ba08._get_site_amplification_linear(vs[idx], C)
            fa[idx] = np.exp(nl + lin)
    return fa
Computes amplification factor similarly to what is done in the 2015 version of the Canada building code . An initial version of this code was kindly provided by Michal Kolaj - Geological Survey of Canada
44,407
def scenario_damage(riskinputs, riskmodel, param, monitor):
    """
    Core function for a damage computation.

    :param riskinputs: list of risk inputs, each with a hazard getter
    :param riskmodel: risk model with loss_types and damage_states
    :param param: dict of parameters; reads 'number_of_ground_motion_fields'
    :param monitor: a performance monitor
    :returns: dict with d_asset/c_asset lists and d_event/c_event arrays
    """
    L = len(riskmodel.loss_types)     # number of loss types
    D = len(riskmodel.damage_states)  # number of damage states
    E = param['number_of_ground_motion_fields']
    R = riskinputs[0].hazard_getter.num_rlzs
    result = dict(d_asset=[], d_event=numpy.zeros((E, R, L, D), F64),
                  c_asset=[], c_event=numpy.zeros((E, R, L), F64))
    for ri in riskinputs:
        for out in riskmodel.gen_outputs(ri, monitor):
            r = out.rlzi
            for l, loss_type in enumerate(riskmodel.loss_types):
                for asset, fractions in zip(ri.assets, out[loss_type]):
                    # first D columns hold the damage fractions,
                    # scaled by the number of units in the asset
                    dmg = fractions[:, :D] * asset['number']
                    result['d_event'][:, r, l] += dmg
                    result['d_asset'].append(
                        (l, r, asset['ordinal'], scientific.mean_std(dmg)))
                    if riskmodel.consequences:
                        # column D holds the consequence fraction,
                        # scaled by the asset value for this loss type
                        csq = fractions[:, D] * asset['value-' + loss_type]
                        result['c_asset'].append(
                            (l, r, asset['ordinal'],
                             scientific.mean_std(csq)))
                        result['c_event'][:, r, l] += csq
    return result
Core function for a damage computation .
44,408
def form(value):
    """
    Format numbers in a nice way: scientific notation for tiny positive
    values, 5 decimal digits for small floats, thousands separators for
    large values; bytes are decoded, sequences are formatted recursively.
    """
    if isinstance(value, FLOAT + INT):
        # NB: the branch order matters; NaN fails every comparison below
        # and is only caught by the isnan check near the end
        if value <= 0:
            return str(value)
        elif value < .001:
            return '%.3E' % value
        elif value < 10 and isinstance(value, FLOAT):
            return '%.5f' % value
        elif value > 1000:
            return '{:,d}'.format(int(round(value)))
        elif numpy.isnan(value):
            return 'NaN'
        else:  # ints and floats in the range [10, 1000]
            return str(int(value))
    elif isinstance(value, bytes):
        return decode(value)
    elif isinstance(value, str):
        return value
    elif isinstance(value, numpy.object_):
        return str(value)
    elif hasattr(value, '__len__') and len(value) > 1:
        # sequences are formatted element by element
        return ' '.join(map(form, value))
    return str(value)
Format numbers in a nice way .
44,409
def sum_tbl(tbl, kfield, vfields):
    """
    Aggregate a composite array and compute the totals on a given key.

    :param tbl: a composite (record) array
    :param kfield: name of the field to group by
    :param vfields: names of the fields to sum within each group
    :returns: a composite array with one record per distinct key value,
        plus a 'counts' field with the number of records in the group
    """
    pairs = [(n, tbl.dtype[n]) for n in [kfield] + vfields]
    dt = numpy.dtype(pairs + [('counts', int)])

    def sum_all(group):
        # accumulate the vfields and the record count for one group
        vals = numpy.zeros(1, dt)[0]
        for rec in group:
            for vfield in vfields:
                vals[vfield] += rec[vfield]
            vals['counts'] += 1
        # the key is the same for every record in the group
        vals[kfield] = rec[kfield]
        return vals
    # NB: groupby here is the project utility taking an aggregation
    # function, not itertools.groupby
    rows = groupby(tbl, operator.itemgetter(kfield), sum_all).values()
    array = numpy.zeros(len(rows), dt)
    for i, row in enumerate(rows):
        for j, name in enumerate(dt.names):
            array[i][name] = row[j]
    return array
Aggregate a composite array and compute the totals on a given key .
44,410
def view_slow_sources(token, dstore, maxrows=20):
    """
    Returns the slowest sources
    """
    info = dstore['source_info'].value
    info.sort(order='calc_time')  # ascending in-place sort
    slowest = info[::-1][:maxrows]  # top maxrows, slowest first
    return rst_table(slowest)
Returns the slowest sources
44,411
def view_contents(token, dstore):
    """
    Returns the size of the contents of the datastore and its total size
    """
    try:
        desc = dstore['oqparam'].description
    except KeyError:
        desc = ''
    sizes = sorted((dstore.getsize(key), key) for key in dstore)
    rows = [(key, humansize(nbytes)) for nbytes, key in sizes]
    fname = dstore.filename
    total = '\n%s : %s' % (fname, humansize(os.path.getsize(fname)))
    return rst_table(rows, header=(desc, '')) + total
Returns the size of the contents of the datastore and its total size
44,412
def view_job_info(token, dstore):
    """
    Determine the amount of data transferred from the controller node to
    the workers and back in a classical calculation.
    """
    data = [['task', 'sent', 'received']]
    for task in dstore['task_info']:
        dset = dstore['task_info/' + task]
        if 'argnames' in dset.attrs:
            # 'sent' holds the bytes sent per argument, parallel to the
            # space-separated 'argnames' attribute
            argnames = dset.attrs['argnames'].split()
            totsent = dset.attrs['sent']
            sent = ['%s=%s' % (a, humansize(s)) for s, a in sorted(
                zip(totsent, argnames), reverse=True)]
            recv = dset['received'].sum()
            data.append((task, ' '.join(sent), humansize(recv)))
    return rst_table(data)
Determine the amount of data transferred from the controller node to the workers and back in a classical calculation .
44,413
def avglosses_data_transfer(token, dstore):
    """
    Determine the amount of average losses transferred from the workers to
    the controller node in a risk calculation.
    """
    oq = dstore['oqparam']
    num_assets = len(dstore['assetcol'])
    num_rlzs = dstore['csm_info'].get_num_rlzs()
    num_loss_types = len(dstore.get_attr('risk_model', 'loss_types'))
    ntasks = oq.concurrent_tasks
    nbytes = num_assets * num_rlzs * num_loss_types * 8 * ntasks
    return ('%d asset(s) x %d realization(s) x %d loss type(s) losses x '
            '8 bytes x %d tasks = %s' % (
                num_assets, num_rlzs, num_loss_types, ntasks,
                humansize(nbytes)))
Determine the amount of average losses transferred from the workers to the controller node in a risk calculation .
44,414
def ebr_data_transfer(token, dstore):
    """
    Display the data transferred in an event based risk calculation
    """
    attrs = dstore['losses_by_event'].attrs
    return 'Event Based Risk: sent %s, received %s' % (
        humansize(attrs['sent']), humansize(attrs['tot_received']))
Display the data transferred in an event based risk calculation
44,415
def view_totlosses(token, dstore):
    """
    This is a debugging view. You can use it to check that the total
    losses, i.e. the losses obtained by summing the average losses on all
    assets are indeed equal to the aggregate losses. This is a sanity
    check for the correctness of the implementation.
    """
    oq = dstore['oqparam']
    means = dstore['losses_by_asset']['mean']
    tot_losses = means.sum(axis=0)  # sum over the assets
    return rst_table(tot_losses.view(oq.loss_dt()), fmt='%.6E')
This is a debugging view . You can use it to check that the total losses i . e . the losses obtained by summing the average losses on all assets are indeed equal to the aggregate losses . This is a sanity check for the correctness of the implementation .
44,416
def view_portfolio_losses(token, dstore):
    """
    The losses for the full portfolio, for each realization and loss type,
    extracted from the event loss table.
    """
    oq = dstore['oqparam']
    data = portfolio_loss(dstore).view(oq.loss_dt())[:, 0]
    rlzids = numpy.array([str(rlz) for rlz in range(len(data))])
    array = util.compose_arrays(rlzids, data, 'rlz')
    return rst_table(array, fmt='%.5E')
The losses for the full portfolio for each realization and loss type extracted from the event loss table .
44,417
def view_portfolio_loss(token, dstore):
    """
    The mean and stddev loss for the full portfolio for each loss type,
    extracted from the event loss table, averaged over the realizations.
    """
    data = portfolio_loss(dstore)
    loss_types = list(dstore['oqparam'].loss_dt().names)
    header = ['portfolio_loss'] + loss_types
    mean_row = ['mean']
    std_row = ['stddev']
    for column in data.T:  # one column per loss type
        mean_row.append(column.mean())
        std_row.append(column.std(ddof=1))
    return rst_table([mean_row, std_row], header)
The mean and stddev loss for the full portfolio for each loss type extracted from the event loss table averaged over the realizations
44,418
def view_exposure_info(token, dstore):
    """
    Display info about the exposure model
    """
    assetcol = dstore['assetcol/array'][:]
    taxonomies = sorted(set(dstore['assetcol'].taxonomies))
    cc = dstore['assetcol/cost_calculator']
    ra_flag = ['relative', 'absolute']
    data = [('#assets', len(assetcol)),
            ('#taxonomies', len(taxonomies)),
            # NB: 'deductibile' [sic] is the label used in the output
            ('deductibile', ra_flag[int(cc.deduct_abs)]),
            ('insurance_limit', ra_flag[int(cc.limit_abs)])]
    return rst_table(data) + '\n\n' + view_assets_by_site(token, dstore)
Display info about the exposure model
44,419
def view_fullreport(token, dstore):
    """
    Display an .rst report about the computation
    """
    # local import, presumably to avoid a circular dependency at module
    # load time — TODO confirm
    from openquake.calculators.reportwriter import ReportWriter
    writer = ReportWriter(dstore)
    return writer.make_report()
Display an . rst report about the computation
44,420
def performance_view(dstore):
    """
    Returns the performance view as a numpy array.
    """
    rows = sorted(dstore['performance_data'], key=operator.itemgetter(0))
    out = []
    # rows are (operation, time_sec, memory_mb, counts); aggregate per
    # operation: total time, peak memory, total counts
    for operation, group in itertools.groupby(rows, operator.itemgetter(0)):
        tot_time = 0
        max_mem = 0
        tot_counts = 0
        for _operation, time_sec, memory_mb, counts_ in group:
            tot_time += time_sec
            max_mem = max(max_mem, memory_mb)
            tot_counts += counts_
        out.append((operation, tot_time, max_mem, tot_counts))
    out.sort(key=operator.itemgetter(1), reverse=True)  # slowest first
    return numpy.array(out, perf_dt)
Returns the performance view as a numpy array .
44,421
def stats(name, array, *extras):
    """
    Returns statistics from an array of numbers.

    :param name: a descriptive string
    :returns: (name, mean, std, min, max, len) + extras
    """
    # sample std is undefined for a single value
    if len(array) == 1:
        std = numpy.nan
    else:
        std = numpy.std(array, ddof=1)
    return (name, numpy.mean(array), std,
            numpy.min(array), numpy.max(array), len(array)) + extras
Returns statistics from an array of numbers .
44,422
def view_num_units(token, dstore):
    """
    Display the number of units by taxonomy
    """
    taxo = dstore['assetcol/tagcol/taxonomy'].value
    counts = collections.Counter()
    for asset in dstore['assetcol']:
        counts[taxo[asset['taxonomy']]] += asset['number']
    data = sorted(counts.items())
    total = sum(num for _name, num in data)
    data.append(('*ALL*', total))
    return rst_table(data, header=['taxonomy', 'num_units'])
Display the number of units by taxonomy
44,423
def view_assets_by_site(token, dstore):
    """
    Display statistical information about the distribution of the assets
    """
    taxonomies = dstore['assetcol/tagcol/taxonomy'].value
    assets_by_site = dstore['assetcol'].assets_by_site()
    data = ['taxonomy mean stddev min max num_sites num_assets'.split()]
    num_assets = AccumDict()  # taxonomy -> list of per-site asset counts
    for assets in assets_by_site:
        num_assets += {k: [len(v)] for k, v in group_array(
            assets, 'taxonomy').items()}
    for taxo in sorted(num_assets):
        val = numpy.array(num_assets[taxo])
        data.append(stats(taxonomies[taxo], val, val.sum()))
    if len(num_assets) > 1:  # more than one taxonomy: add a summary row
        n_assets = numpy.array([len(assets) for assets in assets_by_site])
        data.append(stats('*ALL*', n_assets, n_assets.sum()))
    return rst_table(data)
Display statistical information about the distribution of the assets
44,424
def view_required_params_per_trt(token, dstore):
    """
    Display the parameters needed by each tectonic region type
    """
    csm_info = dstore['csm_info']
    tbl = []
    for grp_id, trt in sorted(csm_info.grp_by("trt").items()):
        gsims = csm_info.gsim_lt.get_gsims(trt)
        maker = ContextMaker(trt, gsims)
        distances = sorted(maker.REQUIRES_DISTANCES)
        siteparams = sorted(maker.REQUIRES_SITES_PARAMETERS)
        ruptparams = sorted(maker.REQUIRES_RUPTURE_PARAMETERS)
        # NOTE(review): repr is applied twice, yielding quoted strings
        # like "'GsimName()'" — confirm the double repr is intentional
        tbl.append((grp_id, ' '.join(map(repr, map(repr, gsims))),
                    distances, siteparams, ruptparams))
    return rst_table(
        tbl, header='grp_id gsims distances siteparams ruptparams'.split(),
        fmt=scientificformat)
Display the parameters needed by each tectonic region type
44,425
def view_global_hcurves(token, dstore):
    """
    Display the global hazard curves for the calculation. They are used
    for debugging purposes when comparing the results of two
    calculations. They are the mean over the sites of the mean hazard
    curves.
    """
    oq = dstore['oqparam']
    nsites = len(dstore['sitecol'])
    rlzs_assoc = dstore['csm_info'].get_rlzs_assoc()
    mean_pmap = getters.PmapGetter(dstore, rlzs_assoc).get_mean()
    site_curves = calc.convert_to_array(mean_pmap, nsites, oq.imtls)
    averaged = numpy.zeros(1, site_curves.dtype)
    for name in site_curves.dtype.names:
        averaged[name] = site_curves[name].mean()
    return rst_table(averaged)
Display the global hazard curves for the calculation . They are used for debugging purposes when comparing the results of two calculations . They are the mean over the sites of the mean hazard curves .
44,426
def view_dupl_sources_time(token, dstore):
    """
    Display the time spent computing duplicated sources
    """
    info = dstore['source_info']
    items = sorted(group_array(info.value, 'source_id').items())
    tbl = []
    tot_time = 0
    for source_id, records in items:
        if len(records) > 1:  # the source ID appears more than once
            calc_time = records['calc_time'].sum()
            tot_time += calc_time + records['split_time'].sum()
            tbl.append((source_id, calc_time, len(records)))
    if tbl and info.attrs.get('has_dupl_sources'):
        tot = info['calc_time'].sum() + info['split_time'].sum()
        percent = tot_time / tot * 100
        m = '\nTotal time in duplicated sources: %d/%d (%d%%)' % (
            tot_time, tot, percent)
        return rst_table(tbl, ['source_id', 'calc_time', 'num_dupl']) + m
    else:
        # NOTE(review): if tbl is non-empty but the 'has_dupl_sources'
        # attribute is missing/false, this still reports no duplication —
        # confirm that precedence is intended
        return 'There are no duplicated sources'
Display the time spent computing duplicated sources
44,427
def view_global_poes(token, dstore):
    """
    Display global probabilities averaged on all sites and all GMPEs
    """
    tbl = []
    imtls = dstore['oqparam'].imtls
    header = ['grp_id'] + [str(poe) for poe in imtls.array]
    for grp in sorted(dstore['poes']):
        pmap = dstore['poes/' + grp]
        nsites = len(pmap)
        # average over the sites first, then over the GMPE axis
        site_avg = sum(pmap[sid].array for sid in pmap) / nsites
        gsim_avg = site_avg.sum(axis=1) / pmap.shape_z
        tbl.append([grp] + list(gsim_avg))
    return rst_table(tbl, header=header)
Display global probabilities averaged on all sites and all GMPEs
44,428
def view_global_hmaps(token, dstore):
    """
    Display the global hazard maps for the calculation. They are used for
    debugging purposes when comparing the results of two calculations.
    They are the mean over the sites of the mean hazard maps.
    """
    oq = dstore['oqparam']
    # one field per (imt, poe) pair
    dt = numpy.dtype([('%s-%s' % (imt, poe), F32)
                      for imt in oq.imtls for poe in oq.poes])
    hmaps = dstore['hmaps/mean'].value.view(dt)[:, 0]
    averaged = numpy.zeros(1, hmaps.dtype)
    for name in hmaps.dtype.names:
        averaged[name] = hmaps[name].mean()
    return rst_table(averaged)
Display the global hazard maps for the calculation . They are used for debugging purposes when comparing the results of two calculations . They are the mean over the sites of the mean hazard maps .
44,429
def view_global_gmfs(token, dstore):
    """
    Display GMFs averaged on everything for debugging purposes
    """
    imtls = dstore['oqparam'].imtls
    avg_gmv = dstore['gmf_data/data']['gmv'].mean(axis=0)
    return rst_table([avg_gmv], header=imtls)
Display GMFs averaged on everything for debugging purposes
44,430
def view_mean_disagg(token, dstore):
    """
    Display mean quantities for the disaggregation. Useful for checking
    differences between two calculations.
    """
    tbl = []
    header = ['key']  # fix: defined up-front so an empty 'disagg' group
    # no longer raises NameError when building the header below
    for key, dset in sorted(dstore['disagg'].items()):
        vals = [ds.value.mean() for k, ds in sorted(dset.items())]
        tbl.append([key] + vals)
        # the header comes from the last dataset; all datasets are
        # expected to share the same sub-keys
        header = ['key'] + sorted(dset)
    return rst_table(sorted(tbl), header=header)
Display mean quantities for the disaggregation . Useful for checking differences between two calculations .
44,431
def view_elt(token, dstore):
    """
    Display the event loss table averaged by event
    """
    oq = dstore['oqparam']
    R = len(dstore['csm_info'].rlzs)
    losses_by_rlzi = group_array(dstore['losses_by_event'].value, 'rlzi')
    header = oq.loss_dt().names
    tbl = []
    for rlzi in range(R):
        try:
            tbl.append(losses_by_rlzi[rlzi]['loss'].mean(axis=0))
        except KeyError:  # no events for this realization
            tbl.append([0.] * len(header))
    return rst_table(tbl, header)
Display the event loss table averaged by event
44,432
def view_pmap(token, dstore):
    """
    Display the mean ProbabilityMap associated to a given source group name

    :param token: a string of the form 'pmap:<grp_id>'
    """
    grp = token.split(':')[1]
    rlzs_assoc = dstore['csm_info'].get_rlzs_assoc()
    pgetter = getters.PmapGetter(dstore, rlzs_assoc)
    # fix: removed the dead `pmap = {}` pre-assignment that was
    # immediately overwritten
    pmap = pgetter.get_mean(grp)
    return str(pmap)
Display the mean ProbabilityMap associated to a given source group name
44,433
def view_act_ruptures_by_src(token, dstore):
    """
    Display the actual number of ruptures by source in event based
    calculations
    """
    data = dstore['ruptures'].value[['srcidx', 'serial']]
    ordered = sorted(countby(data, 'srcidx').items(),
                     key=operator.itemgetter(1), reverse=True)
    src_info = dstore['source_info'].value[['grp_id', 'source_id']]
    table = [['src_id', 'grp_id', 'act_ruptures']]
    for srcidx, act_ruptures in ordered:
        src = src_info[srcidx]
        table.append([src['source_id'], src['grp_id'], act_ruptures])
    return rst_table(table)
Display the actual number of ruptures by source in event based calculations
44,434
def view_dupl_sources(token, dstore):
    """
    Show the sources with the same ID and the truly duplicated sources
    """
    fields = ['source_id', 'code', 'gidx1', 'gidx2', 'num_ruptures']
    dic = group_array(dstore['source_info'].value[fields], 'source_id')
    sameid = []  # IDs appearing more than once
    dupl = []    # IDs whose sources are also identical in content
    for source_id, group in dic.items():
        if len(group) > 1:  # same ID
            sources = []
            for rec in group:
                # rebuild a comparable source from its stored geometry
                geom = dstore['source_geom'][rec['gidx1']:rec['gidx2']]
                src = Source(source_id, rec['code'], geom,
                             rec['num_ruptures'])
                sources.append(src)
            if all_equal(sources):
                dupl.append(source_id)
            sameid.append(source_id)
    if not dupl:
        return ''
    msg = str(dupl) + '\n'
    msg += ('Found %d source(s) with the same ID and %d true duplicate(s)'
            % (len(sameid), len(dupl)))
    # a "fake duplicate" has the same ID but different content
    fakedupl = set(sameid) - set(dupl)
    if fakedupl:
        msg += '\nHere is a fake duplicate: %s' % fakedupl.pop()
    return msg
Show the sources with the same ID and the truly duplicated sources
44,435
def view_extreme_groups(token, dstore):
    """
    Show the source groups contributing the most to the highest IML
    """
    data = dstore['disagg_by_grp'].value
    data.sort(order='extreme_poe')  # ascending in-place sort
    return rst_table(data[::-1])  # display in descending order
Show the source groups contributing the most to the highest IML
44,436
def zip_all(directory):
    """
    Zip source models and exposures recursively
    """
    archives = []
    for dirpath, _dirnames, filenames in os.walk(directory):
        if 'ssmLT.xml' in filenames:
            archives.append(
                zip_source_model(os.path.join(dirpath, 'ssmLT.xml')))
        for fname in filenames:
            if fname.endswith('.xml') and 'exposure' in fname.lower():
                archives.append(zip_exposure(os.path.join(dirpath, fname)))
    total = sum(os.path.getsize(arc) for arc in archives)
    logging.info('Generated %s of zipped data', general.humansize(total))
Zip source models and exposures recursively
44,437
def zip_source_model(ssmLT, archive_zip='', log=logging.info):
    """
    Zip the source model files starting from the ssmLT.xml file

    :param ssmLT: path to the source model logic tree file
    :param archive_zip: path of the zip to create (default ssmLT.zip)
    :param log: logging function
    :returns: the path of the created archive
    """
    basedir = os.path.dirname(ssmLT)
    if os.path.basename(ssmLT) != 'ssmLT.xml':
        # normalize the name by copying the content into ssmLT.xml;
        # fix: use `with` so both file handles are closed (the original
        # leaked the read handle)
        orig = ssmLT
        ssmLT = os.path.join(basedir, 'ssmLT.xml')
        with open(orig, 'rb') as fin, open(ssmLT, 'wb') as fout:
            fout.write(fin.read())
    archive_zip = archive_zip or os.path.join(basedir, 'ssmLT.zip')
    if os.path.exists(archive_zip):
        sys.exit('%s exists already' % archive_zip)
    oq = mock.Mock(inputs={'source_model_logic_tree': ssmLT})
    checksum = readinput.get_checksum32(oq)
    checkfile = os.path.join(os.path.dirname(ssmLT), 'CHECKSUM.txt')
    with open(checkfile, 'w') as f:
        f.write(str(checksum))
    files = logictree.collect_info(ssmLT).smpaths + [
        os.path.abspath(ssmLT), os.path.abspath(checkfile)]
    general.zipfiles(files, archive_zip, log=log, cleanup=True)
    return archive_zip
Zip the source model files starting from the ssmLT.xml file
44,438
def zip_job(job_ini, archive_zip='', risk_ini='', oq=None, log=logging.info):
    """
    Zip the given job.ini file into the given archive, together with all
    related files.
    """
    if not os.path.exists(job_ini):
        sys.exit('%s does not exist' % job_ini)
    archive_zip = archive_zip or 'job.zip'
    if isinstance(archive_zip, str):  # a path was passed: validate it
        if not archive_zip.endswith('.zip'):
            sys.exit('%s does not end with .zip' % archive_zip)
        if os.path.exists(archive_zip):
            sys.exit('%s exists already' % archive_zip)
    oq = oq or readinput.get_oqparam(job_ini, validate=False)
    if risk_ini:
        # merge the inputs of the risk file into the hazard parameters
        risk_ini = os.path.normpath(os.path.abspath(risk_ini))
        extra_inputs = readinput.get_params([risk_ini])['inputs']
        del extra_inputs['job_ini']
        oq.inputs.update(extra_inputs)
    files = readinput.get_input_files(oq)
    if risk_ini:
        files = [risk_ini] + files
    return general.zipfiles(files, archive_zip, log=log)
Zip the given job . ini file into the given archive together with all related files .
44,439
def build_report(job_ini, output_dir=None):
    """
    Write a report.rst file with information about the calculation
    without running it.

    :param job_ini: path to the job.ini file
    :param output_dir: directory for the report (default: dir of job_ini)
    :returns: the path of the generated report
    """
    calc_id = logs.init()
    oq = readinput.get_oqparam(job_ini)
    if oq.calculation_mode == 'classical':
        # downgrade to the cheaper preclassical mode: the report does not
        # need the full classical computation
        oq.calculation_mode = 'preclassical'
    oq.ground_motion_fields = False
    output_dir = output_dir or os.path.dirname(job_ini)
    from openquake.calculators import base  # local import (load order)
    calc = base.calculators(oq, calc_id)
    calc.save_params()
    calc.pre_execute()
    if oq.calculation_mode == 'preclassical':
        calc.execute()
    rw = ReportWriter(calc.datastore)
    rw.make_report()
    report = (os.path.join(output_dir, 'report.rst') if output_dir
              else calc.datastore.export_path('report.rst'))
    try:
        rw.save(report)
    except IOError as exc:  # best-effort: report the error and go on
        sys.stderr.write(str(exc) + '\n')
    # reset module-level state (presumably a cache populated by
    # pre_execute) — TODO confirm
    readinput.exposure = None
    return report
Write a report . csv file with information about the calculation without running it
44,440
def add(self, name, obj=None):
    """
    Add the view named `name` to the report text
    """
    if obj:
        text = '\n::\n\n' + indent(str(obj))
    else:
        text = views.view(name, self.dstore)
    if text:
        title = self.title[name]
        underline = '-' * len(title)
        self.text += '\n'.join(['\n\n' + title, underline, text])
Add the view named name to the report text
44,441
def make_report(self):
    """
    Build the report and return a reStructuredText string
    """
    oq, ds = self.oq, self.dstore
    for name in ('params', 'inputs'):
        self.add(name)
    if 'csm_info' in ds:
        self.add('csm_info')
        if ds['csm_info'].source_models[0].name != 'scenario':
            # required params make no sense for scenario calculations
            self.add('required_params_per_trt')
        self.add('rlzs_assoc', ds['csm_info'].get_rlzs_assoc())
    if 'csm_info' in ds:
        self.add('ruptures_per_trt')
    if 'rup_data' in ds:
        self.add('ruptures_events')
    if oq.calculation_mode in ('event_based_risk',):
        self.add('avglosses_data_transfer')
    if 'exposure' in oq.inputs:
        self.add('exposure_info')
    if 'source_info' in ds:
        self.add('slow_sources')
        self.add('times_by_source_class')
        self.add('dupl_sources')
    if 'task_info' in ds:
        self.add('task_info')
        tasks = set(ds['task_info'])
        if 'classical' in tasks:
            # first and last classical tasks
            self.add('task_hazard:0')
            self.add('task_hazard:-1')
        self.add('job_info')
    if 'performance_data' in ds:
        self.add('performance')
    return self.text
Build the report and return a reStructuredText string
44,442
def save(self, fname):
    """
    Save the report
    """
    content = encode(self.text)
    with open(fname, 'wb') as f:
        f.write(content)
Save the report
44,443
def _compute_mean(self, imt, mag, rhypo):
    """
    Compute mean value from lookup table.
    """
    mag = np.zeros_like(rhypo) + self._convert_magnitude(mag)
    # fix: clip distances below 10 km on a copy — the original did
    # `rhypo[rhypo < 10] = 10`, mutating the caller's array in place
    rhypo = np.log10(np.clip(rhypo, 10, None))
    table = RectBivariateSpline(
        self.MAGS, self.DISTS, self.IMTS_TABLES[imt].T)
    mean = table.ev(mag, rhypo)
    # presumably converts the table's log10 values to natural log —
    # verify against the GMPE tables
    return mean * np.log(10)
Compute mean value from lookup table .
44,444
def _get_recurrence_model(input_model):
    """
    Returns the annual and cumulative recurrence rates predicted by the
    recurrence model

    :param input_model: a TruncatedGRMFD, EvenlyDiscretizedMFD or
        YoungsCoppersmith1985MFD instance
    :returns: the tuple (annual_rates, cumulative_rates)
    """
    if not isinstance(input_model, (TruncatedGRMFD, EvenlyDiscretizedMFD,
                                    YoungsCoppersmith1985MFD)):
        raise ValueError('Recurrence model not recognised')
    # (magnitude, rate) pairs as a 2-column array
    annual_rates = input_model.get_annual_occurrence_rates()
    annual_rates = np.array([[val[0], val[1]] for val in annual_rates])
    # cumulative rate at magnitude m = sum of rates for magnitudes >= m
    cumulative_rates = np.array([np.sum(annual_rates[iloc:, 1])
                                 for iloc in range(0, len(annual_rates), 1)])
    return annual_rates, cumulative_rates
Returns the annual and cumulative recurrence rates predicted by the recurrence model
44,445
def _check_completeness_table(completeness, catalogue):
    """
    Generates the completeness table according to different instances
    """
    if isinstance(completeness, np.ndarray) and \
            np.shape(completeness)[1] == 2:
        # already a well-formed (year, magnitude) table
        return completeness
    if isinstance(completeness, float):
        # single magnitude threshold, applied from the earliest year
        first_year = float(np.min(catalogue.data['year']))
        return np.array([[first_year, completeness]])
    if completeness is None:
        # default: complete above the smallest magnitude in the catalogue
        first_year = float(np.min(catalogue.data['year']))
        min_mag = np.min(catalogue.data['magnitude'])
        return np.array([[first_year, min_mag]])
    raise ValueError('Completeness representation not recognised')
Generates the completeness table according to different instances
44,446
def plot_recurrence_model(
        input_model, catalogue, completeness, dmag=0.1, filename=None,
        figure_size=(8, 6), filetype='png', dpi=300, ax=None):
    """
    Plot a calculated recurrence model over an observed catalogue,
    adjusted for time-varying completeness
    """
    annual_rates, cumulative_rates = _get_recurrence_model(input_model)
    # ensure the catalogue end year is set before counting occurrences
    if not catalogue.end_year:
        catalogue.update_end_year()
    cent_mag, t_per, n_obs = get_completeness_counts(
        catalogue, completeness, dmag)
    obs_rates = n_obs / t_per
    # observed cumulative rate at magnitude m: sum of rates for >= m
    cum_obs_rates = np.array([np.sum(obs_rates[i:])
                              for i in range(len(obs_rates))])
    if ax is None:
        fig, ax = plt.subplots(figsize=figure_size)
    else:
        fig = ax.get_figure()
    # incremental in blue, cumulative in red; dots/squares = observed
    ax.semilogy(cent_mag, obs_rates, 'bo')
    ax.semilogy(annual_rates[:, 0], annual_rates[:, 1], 'b-')
    ax.semilogy(cent_mag, cum_obs_rates, 'rs')
    ax.semilogy(annual_rates[:, 0], cumulative_rates, 'r-')
    ax.grid(which='both')
    ax.set_xlabel('Magnitude')
    ax.set_ylabel('Annual Rate')
    ax.legend(['Observed Incremental Rate', 'Model Incremental Rate',
               'Observed Cumulative Rate', 'Model Cumulative Rate'])
    ax.tick_params(labelsize=12)
    _save_image(fig, filename, filetype, dpi)
Plot a calculated recurrence model over an observed catalogue adjusted for time - varying completeness
44,447
def plot_trunc_gr_model(aval, bval, min_mag, max_mag, dmag, catalogue=None,
                        completeness=None, filename=None,
                        figure_size=(8, 6), filetype='png', dpi=300,
                        ax=None):
    """
    Plots a Gutenberg-Richter model

    :param aval: Gutenberg-Richter a-value
    :param bval: Gutenberg-Richter b-value
    :param catalogue: optional observed catalogue to overlay
    """
    input_model = TruncatedGRMFD(min_mag, max_mag, dmag, aval, bval)
    if not catalogue:
        # no observed catalogue: plot the model rates only
        annual_rates, cumulative_rates = _get_recurrence_model(input_model)
        if ax is None:
            fig, ax = plt.subplots(figsize=figure_size)
        else:
            fig = ax.get_figure()
        ax.semilogy(annual_rates[:, 0], annual_rates[:, 1], 'b-')
        ax.semilogy(annual_rates[:, 0], cumulative_rates, 'r-')
        # fix: matplotlib Axes has no `xlabel` or `set_legend` methods;
        # the original raised AttributeError here
        ax.set_xlabel('Magnitude')
        ax.set_ylabel('Annual Rate')
        ax.legend(['Incremental Rate', 'Cumulative Rate'])
        _save_image(fig, filename, filetype, dpi)
    else:
        completeness = _check_completeness_table(completeness, catalogue)
        plot_recurrence_model(
            input_model, catalogue, completeness, dmag, filename=filename,
            figure_size=figure_size, filetype=filetype, dpi=dpi, ax=ax)
Plots a Gutenberg - Richter model
44,448
def read(source, chatty=True, stop=None):
    """
    Convert a NRML file into a validated Node object. Keeps the entire
    tree in memory.
    """
    parser = ValidatingXmlParser(validators, stop)
    nrml = parser.parse_file(source)
    if striptag(nrml.tag) != 'nrml':
        raise ValueError('%s: expected a node of kind nrml, got %s' %
                         (source, nrml.tag))
    # the tag has the form '{namespace-uri}name': extract the URI
    xmlns = nrml.tag.split('}')[0][1:]
    if xmlns != NRML05 and chatty:
        logging.debug('%s is at an outdated version: %s', source, xmlns)
    nrml['xmlns'] = xmlns
    nrml['xmlns:gml'] = GML_NAMESPACE
    return nrml
Convert a NRML file into a validated Node object . Keeps the entire tree in memory .
44,449
def write(nodes, output=sys.stdout, fmt='%.7E', gml=True, xmlns=None):
    """
    Convert nodes into a NRML file. `output` must be a file object open
    in write mode. If you want to perform a consistency check, open it in
    read-write mode, then it will be read after creation and validated.
    """
    nrml_root = Node('nrml', nodes=nodes)
    namespaces = {xmlns or NRML05: ''}
    if gml:
        namespaces[GML_NAMESPACE] = 'gml:'
    with floatformat(fmt):
        node_to_xml(nrml_root, output, namespaces)
    if hasattr(output, 'mode') and '+' in output.mode:
        # read-write mode: rewind and re-read to validate the output
        output.seek(0)
        read(output)
Convert nodes into a NRML file . output must be a file object open in write mode . If you want to perform a consistency check open it in read - write mode then it will be read after creation and validated .
44,450
def to_string(node):
    """
    Convert a node into a string in NRML format
    """
    with io.BytesIO() as buf:
        write([node], buf)
        raw = buf.getvalue()
    return raw.decode('utf-8')
Convert a node into a string in NRML format
44,451
def get_mean_values(self, C, sites, rup, dists, a1100):
    """
    Returns the mean values for a specific IMT

    :param a1100: the reference rock PGA; when it is an array the real
        site parameters are used, otherwise reference conditions
        (vs30 = 1100 m/s) are assumed
    """
    if isinstance(a1100, np.ndarray):
        # site-specific response: use the real site parameters
        temp_vs30 = sites.vs30
        temp_z2pt5 = sites.z2pt5
    else:
        # reference rock: vs30 = 1100 m/s and the corresponding basin
        # depth from the basin model
        temp_vs30 = 1100.0 * np.ones(len(sites.vs30))
        temp_z2pt5 = self._select_basin_model(1100.0) * \
            np.ones_like(temp_vs30)
    # the mean is the sum of the individual model terms
    return (self._get_magnitude_term(C, rup.mag) +
            self._get_geometric_attenuation_term(C, rup.mag, dists.rrup) +
            self._get_style_of_faulting_term(C, rup) +
            self._get_hanging_wall_term(C, rup, dists) +
            self._get_shallow_site_response_term(C, temp_vs30, a1100) +
            self._get_basin_response_term(C, temp_z2pt5) +
            self._get_hypocentral_depth_term(C, rup) +
            self._get_fault_dip_term(C, rup) +
            self._get_anelastic_attenuation_term(C, dists.rrup))
Returns the mean values for a specific IMT
44,452
def _get_magnitude_term(self, C, mag):
    """
    Returns the magnitude scaling term defined in equation 2
    """
    # piecewise-linear hinge model: each coefficient activates above its
    # corner magnitude (equivalent to the original elif chain)
    f_mag = C["c0"] + C["c1"] * mag
    if mag > 4.5:
        f_mag += C["c2"] * (mag - 4.5)
    if mag > 5.5:
        f_mag += C["c3"] * (mag - 5.5)
    if mag > 6.5:
        f_mag += C["c4"] * (mag - 6.5)
    return f_mag
Returns the magnitude scaling term defined in equation 2
44,453
def _get_geometric_attenuation_term(self, C, mag, rrup):
    """
    Returns the geometric attenuation term defined in equation 3
    """
    effective_distance = np.sqrt(rrup ** 2. + C["c7"] ** 2.)
    return (C["c5"] + C["c6"] * mag) * np.log(effective_distance)
Returns the geometric attenuation term defined in equation 3
44,454
def _get_style_of_faulting_term ( self , C , rup ) : if ( rup . rake > 30.0 ) and ( rup . rake < 150. ) : frv = 1.0 fnm = 0.0 elif ( rup . rake > - 150.0 ) and ( rup . rake < - 30.0 ) : fnm = 1.0 frv = 0.0 else : fnm = 0.0 frv = 0.0 fflt_f = ( self . CONSTS [ "c8" ] * frv ) + ( C [ "c9" ] * fnm ) if rup . mag <= 4.5 : fflt_m = 0.0 elif rup . mag > 5.5 : fflt_m = 1.0 else : fflt_m = rup . mag - 4.5 return fflt_f * fflt_m
Returns the style - of - faulting scaling term defined in equations 4 to 6
44,455
def _get_hanging_wall_term(self, C, rup, dists):
    """
    Returns the hanging wall scaling term defined in equations 7 to 16
    as the product of the distance, magnitude, depth and dip factors.
    """
    hw_rx = self._get_hanging_wall_coeffs_rx(C, rup, dists.rx)
    hw_rrup = self._get_hanging_wall_coeffs_rrup(dists)
    hw_mag = self._get_hanging_wall_coeffs_mag(C, rup.mag)
    hw_ztor = self._get_hanging_wall_coeffs_ztor(rup.ztor)
    hw_dip = self._get_hanging_wall_coeffs_dip(rup.dip)
    return C["c10"] * hw_rx * hw_rrup * hw_mag * hw_ztor * hw_dip
Returns the hanging wall scaling term defined in equations 7 to 16
44,456
def _get_hanging_wall_coeffs_rx(self, C, rup, r_x):
    """
    Returns the hanging wall r-x scaling term defined in equations
    7 to 12.
    """
    # limits of the two tapering ranges
    r_1 = rup.width * cos(radians(rup.dip))
    r_2 = 62.0 * rup.mag - 350.0
    fhngrx = np.zeros(len(r_x))
    # sites on the hanging wall side, within r_1 of the top edge
    near = np.logical_and(r_x >= 0., r_x < r_1)
    fhngrx[near] = self._get_f1rx(C, r_x[near], r_1)
    # sites beyond r_1: quadratic taper, floored at zero
    far = r_x >= r_1
    f2rx = self._get_f2rx(C, r_x[far], r_1, r_2)
    f2rx[f2rx < 0.0] = 0.0
    fhngrx[far] = f2rx
    return fhngrx
Returns the hanging wall r - x scaling term defined in equations 7 to 12
44,457
def _get_f1rx ( self , C , r_x , r_1 ) : rxr1 = r_x / r_1 return C [ "h1" ] + ( C [ "h2" ] * rxr1 ) + ( C [ "h3" ] * ( rxr1 ** 2. ) )
Defines the f1 scaling coefficient defined in equation 9
44,458
def _get_f2rx ( self , C , r_x , r_1 , r_2 ) : drx = ( r_x - r_1 ) / ( r_2 - r_1 ) return self . CONSTS [ "h4" ] + ( C [ "h5" ] * drx ) + ( C [ "h6" ] * ( drx ** 2. ) )
Defines the f2 scaling coefficient defined in equation 10
44,459
def _get_hanging_wall_coeffs_rrup ( self , dists ) : fhngrrup = np . ones ( len ( dists . rrup ) ) idx = dists . rrup > 0.0 fhngrrup [ idx ] = ( dists . rrup [ idx ] - dists . rjb [ idx ] ) / dists . rrup [ idx ] return fhngrrup
Returns the hanging wall rrup term defined in equation 13
44,460
def _get_hanging_wall_coeffs_mag ( self , C , mag ) : if mag < 5.5 : return 0.0 elif mag > 6.5 : return 1.0 + C [ "a2" ] * ( mag - 6.5 ) else : return ( mag - 5.5 ) * ( 1.0 + C [ "a2" ] * ( mag - 6.5 ) )
Returns the hanging wall magnitude term defined in equation 14
44,461
def _get_hypocentral_depth_term ( self , C , rup ) : if rup . hypo_depth <= 7.0 : fhyp_h = 0.0 elif rup . hypo_depth > 20.0 : fhyp_h = 13.0 else : fhyp_h = rup . hypo_depth - 7.0 if rup . mag <= 5.5 : fhyp_m = C [ "c17" ] elif rup . mag > 6.5 : fhyp_m = C [ "c18" ] else : fhyp_m = C [ "c17" ] + ( ( C [ "c18" ] - C [ "c17" ] ) * ( rup . mag - 5.5 ) ) return fhyp_h * fhyp_m
Returns the hypocentral depth scaling term defined in equations 21 - 23
44,462
def _get_fault_dip_term ( self , C , rup ) : if rup . mag < 4.5 : return C [ "c19" ] * rup . dip elif rup . mag > 5.5 : return 0.0 else : return C [ "c19" ] * ( 5.5 - rup . mag ) * rup . dip
Returns the fault dip term defined in equation 24
44,463
def _get_anelastic_attenuation_term ( self , C , rrup ) : f_atn = np . zeros ( len ( rrup ) ) idx = rrup >= 80.0 f_atn [ idx ] = ( C [ "c20" ] + C [ "Dc20" ] ) * ( rrup [ idx ] - 80.0 ) return f_atn
Returns the anelastic attenuation term defined in equation 25
44,464
def _get_basin_response_term ( self , C , z2pt5 ) : f_sed = np . zeros ( len ( z2pt5 ) ) idx = z2pt5 < 1.0 f_sed [ idx ] = ( C [ "c14" ] + C [ "c15" ] * float ( self . CONSTS [ "SJ" ] ) ) * ( z2pt5 [ idx ] - 1.0 ) idx = z2pt5 > 3.0 f_sed [ idx ] = C [ "c16" ] * C [ "k3" ] * exp ( - 0.75 ) * ( 1.0 - np . exp ( - 0.25 * ( z2pt5 [ idx ] - 3.0 ) ) ) return f_sed
Returns the basin response term defined in equation 20
44,465
def _get_shallow_site_response_term(self, C, vs30, pga_rock):
    """
    Returns the shallow site response term defined in equations 17, 18
    and 19.

    :param C: coefficient dictionary for the current IMT
    :param vs30: array of site vs30 values (m/s)
    :param pga_rock: array of PGA values on reference rock, used by the
        nonlinear part of the amplification
    """
    vs_mod = vs30 / C["k1"]
    # linear term, applied everywhere
    f_site_g = C["c11"] * np.log(vs_mod)
    # stiff sites (vs30 > k1): purely linear amplification
    idx = vs30 > C["k1"]
    f_site_g[idx] = f_site_g[idx] + (C["k2"] * self.CONSTS["n"] *
                                     np.log(vs_mod[idx]))
    # softer sites (vs30 <= k1): add the nonlinear, pga-dependent part
    idx = np.logical_not(idx)
    if np.any(idx):
        f_site_g[idx] = f_site_g[idx] + C["k2"] * (
            np.log(pga_rock[idx] +
                   self.CONSTS["c"] * (vs_mod[idx] ** self.CONSTS["n"])) -
            np.log(pga_rock[idx] + self.CONSTS["c"]))
    if self.CONSTS["SJ"]:
        # SJ presumably flags the Japan-specific regional adjustment
        # (equation 19) -- TODO confirm against the class CONSTS
        fsite_j = np.log(vs_mod)
        idx = vs30 > 200.0
        if np.any(idx):
            fsite_j[idx] = (C["c13"] + C["k2"] * self.CONSTS["n"]) * \
                fsite_j[idx]
        idx = np.logical_not(idx)
        if np.any(idx):
            fsite_j[idx] = (C["c12"] + C["k2"] * self.CONSTS["n"]) * \
                (fsite_j[idx] - np.log(200.0 / C["k1"]))
        return f_site_g + fsite_j
    else:
        return f_site_g
Returns the shallow site response term defined in equations 17 18 and 19
44,466
def _get_stddevs(self, C, C_PGA, rup, sites, pga1100, stddev_types):
    """
    Returns the inter-event (tau), intra-event (phi) and total standard
    deviations, combining the IMT and PGA variabilities through the
    linearised site amplification factor alpha.

    :param C: coefficients for the current IMT
    :param C_PGA: coefficients for PGA
    :param pga1100: PGA on reference rock (vs30 = 1100 m/s)
    :param stddev_types: list of requested standard deviation types
    """
    # between- and within-event stddevs of PGA on rock
    tau_lnpga_b, phi_lnpga_b = self._get_stddevs_pga(C_PGA, rup)
    num_sites = len(sites.vs30)
    tau_lnyb = self._get_taulny(C, rup.mag)
    # within-event stddev net of the site amplification variability
    phi_lnyb = np.sqrt(self._get_philny(C, rup.mag) ** 2. -
                       self.CONSTS["philnAF"] ** 2.)
    # linearised amplification factor (equation 31)
    alpha = self._get_alpha(C, sites.vs30, pga1100)
    # combine IMT and PGA terms with correlation rholny
    tau = np.sqrt(
        (tau_lnyb ** 2.) +
        ((alpha ** 2.) * (tau_lnpga_b ** 2.)) +
        (2.0 * alpha * C["rholny"] * tau_lnyb * tau_lnpga_b))
    phi = np.sqrt(
        (phi_lnyb ** 2.) +
        (self.CONSTS["philnAF"] ** 2.) +
        ((alpha ** 2.) * (phi_lnpga_b ** 2.)) +
        (2.0 * alpha * C["rholny"] * phi_lnyb * phi_lnpga_b))
    stddevs = []
    for stddev_type in stddev_types:
        assert stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES
        if stddev_type == const.StdDev.TOTAL:
            # total = sqrt(tau^2 + phi^2), broadcast to all sites
            stddevs.append(np.sqrt((tau ** 2.) + (phi ** 2.)) +
                           np.zeros(num_sites))
        elif stddev_type == const.StdDev.INTRA_EVENT:
            stddevs.append(phi + np.zeros(num_sites))
        elif stddev_type == const.StdDev.INTER_EVENT:
            stddevs.append(tau + np.zeros(num_sites))
    return stddevs
Returns the inter - and intra - event and total standard deviations
44,467
def _get_stddevs_pga(self, C, rup):
    """
    Returns the inter- and intra-event standard deviation coefficients
    for PGA, with the site amplification variability removed from the
    intra-event part.
    """
    tau = self._get_taulny(C, rup.mag)
    phi_total = self._get_philny(C, rup.mag)
    phi = np.sqrt(phi_total ** 2. - self.CONSTS["philnAF"] ** 2.)
    return tau, phi
Returns the inter - and intra - event coefficients for PGA
44,468
def _get_alpha ( self , C , vs30 , pga_rock ) : alpha = np . zeros ( len ( pga_rock ) ) idx = vs30 < C [ "k1" ] if np . any ( idx ) : af1 = pga_rock [ idx ] + self . CONSTS [ "c" ] * ( ( vs30 [ idx ] / C [ "k1" ] ) ** self . CONSTS [ "n" ] ) af2 = pga_rock [ idx ] + self . CONSTS [ "c" ] alpha [ idx ] = C [ "k2" ] * pga_rock [ idx ] * ( ( 1.0 / af1 ) - ( 1.0 / af2 ) ) return alpha
Returns the alpha the linearised functional relationship between the site amplification and the PGA on rock . Equation 31 .
44,469
def decimal_time(year, month, day, hour, minute, second):
    """
    Returns the full time as a decimal value (fraction of year).

    :param year: array of integer years
    :param month, day, hour, minute, second: arrays of the remaining
        date/time components; all-zero arrays fall back to defaults
        (month/day -> 1, hour/minute/second -> 0)
    :returns: array of decimal times
    """
    # default components: first of January, midnight
    tmo = np.ones_like(year, dtype=int)
    tda = np.ones_like(year, dtype=int)
    tho = np.zeros_like(year, dtype=int)
    tmi = np.zeros_like(year, dtype=int)
    tse = np.zeros_like(year, dtype=float)
    # range checks on the raw inputs
    if any(month < 1) or any(month > 12):
        raise ValueError('Month must be in [1, 12]')
    if any(day < 1) or any(day > 31):
        raise ValueError('Day must be in [1, 31]')
    if any(hour < 0) or any(hour > 24):
        raise ValueError('Hour must be in [0, 24]')
    if any(minute < 0) or any(minute > 60):
        raise ValueError('Minute must be in [0, 60]')
    if any(second < 0) or any(second > 60):
        raise ValueError('Second must be in [0, 60]')
    # replace the defaults only when the component is not all-zero
    if any(month):
        tmo = month
    if any(day):
        tda = day
    if any(hour):
        tho = hour
    if any(minute):
        tmi = minute
    if any(second):
        tse = second
    tmonth = tmo - 1
    # day-of-year from the cumulative month-length markers
    # (MARKER_NORMAL/MARKER_LEAP are module-level constants)
    day_count = MARKER_NORMAL[tmonth] + tda - 1
    id_leap = leap_check(year)
    leap_loc = np.where(id_leap)[0]
    day_count[leap_loc] = MARKER_LEAP[tmonth[leap_loc]] + tda[leap_loc] - 1
    # seconds elapsed since the start of the year
    year_secs = ((day_count.astype(float) * SECONDS_PER_DAY) + tse +
                 (60. * tmi.astype(float)) + (3600. * tho.astype(float)))
    # fraction of a 365-day year, corrected below for leap years
    dtime = year.astype(float) + (year_secs / (365. * 24. * 3600.))
    dtime[leap_loc] = year[leap_loc].astype(float) + (
        year_secs[leap_loc] / (366. * 24. * 3600.))
    return dtime
Returns the full time as a decimal value
44,470
def haversine(lon1, lat1, lon2, lat2, radians=False, earth_rad=6371.227):
    """
    Calculates the geographical distance (in km) between two sets of
    points using the haversine formula.

    :returns: a (nlocs1, nlocs2) matrix of distances
    """
    if not radians:
        # degrees -> radians
        cfact = np.pi / 180.
        lon1, lat1 = cfact * lon1, cfact * lat1
        lon2, lat2 = cfact * lon2, cfact * lat2
    # promote scalars to one-element arrays
    if not np.shape(lon1):
        nlocs1 = 1
        lon1 = np.array([lon1])
        lat1 = np.array([lat1])
    else:
        nlocs1 = np.max(np.shape(lon1))
    if not np.shape(lon2):
        nlocs2 = 1
        lon2 = np.array([lon2])
        lat2 = np.array([lat2])
    else:
        nlocs2 = np.max(np.shape(lon2))
    distance = np.zeros((nlocs1, nlocs2))
    for j in range(nlocs2):
        dlat = lat1 - lat2[j]
        dlon = lon1 - lon2[j]
        aval = (np.sin(dlat / 2.) ** 2.) + (
            np.cos(lat1) * np.cos(lat2[j]) * (np.sin(dlon / 2.) ** 2.))
        distance[:, j] = (2. * earth_rad * np.arctan2(
            np.sqrt(aval), np.sqrt(1 - aval))).T
    return distance
Allows to calculate geographical distance using the haversine formula .
44,471
def greg2julian(year, month, day, hour, minute, second):
    """
    Converts a date (as arrays of components) from Gregorian calendar
    to Julian day number.
    """
    year = year.astype(float)
    month = month.astype(float)
    day = day.astype(float)
    # decimal hours since midnight (UT)
    timeut = hour.astype(float) + (minute.astype(float) / 60.0) + \
        (second / 3600.0)
    term1 = 367.0 * year
    term2 = np.floor(7.0 * (year + np.floor((month + 9.0) / 12.0)) / 4.0)
    term3 = np.floor(3.0 * (np.floor(
        (year + (month - 9.0) / 7.0) / 100.0) + 1.0) / 4.0)
    term4 = np.floor((275.0 * month) / 9.0)
    return term1 - term2 - term3 + term4 + day + 1721028.5 + (timeut / 24.0)
Function to convert a date from Gregorian to Julian format
44,472
def sample_truncated_gaussian_vector(data, uncertainties, bounds=None):
    """
    Samples a Gaussian distribution around `data` with the given
    standard deviations, optionally truncated to (lower, upper) bounds.
    """
    nvals = len(data)
    if bounds:
        # standardise the bounds; None means unbounded on that side
        if bounds[0] is None:
            lower_bound = -np.inf * np.ones_like(data)
        else:
            lower_bound = (bounds[0] - data) / uncertainties
        if bounds[1] is None:
            upper_bound = np.inf * np.ones_like(data)
        else:
            upper_bound = (bounds[1] - data) / uncertainties
        sample = hmtk_truncnorm.rvs(lower_bound, upper_bound, size=nvals)
    else:
        sample = np.random.normal(0., 1., nvals)
    return data + uncertainties * sample
Samples a Gaussian distribution subject to boundaries on the data
44,473
def hmtk_histogram_2D(xvalues, yvalues, bins, x_offset=1.0E-10,
                      y_offset=1.0E-10):
    """
    2D counterpart of the hmtk 1D histogram: bins (xvalues, yvalues)
    pairs into the given (xbins, ybins), shifting the edges by a tiny
    offset so that values exactly on an edge fall into the upper bin.

    :returns: an (n_x, n_y) array of counts
    """
    xbins = bins[0] - x_offset
    ybins = bins[1] - y_offset
    n_x = len(xbins) - 1
    n_y = len(ybins) - 1
    counter = np.zeros([n_y, n_x], dtype=float)
    for row, (ylow, yhigh) in enumerate(zip(ybins[:-1], ybins[1:])):
        in_row = np.logical_and(yvalues >= ylow, yvalues < yhigh)
        x_vals = xvalues[in_row]
        for col, (xlow, xhigh) in enumerate(zip(xbins[:-1], xbins[1:])):
            in_cell = np.logical_and(x_vals >= xlow, x_vals < xhigh)
            counter[row, col] += float(np.sum(in_cell))
    return counter.T
See the explanation for the 1D case - now applied to 2D .
44,474
def bootstrap_histogram_1D(values, intervals, uncertainties=None,
                           normalisation=False, number_bootstraps=None,
                           boundaries=None):
    """
    Bootstrap samples a set of vectors and returns the (optionally
    normalised) 1D histogram of the samples.

    :param values: data values to bin
    :param intervals: bin edges
    :param uncertainties: optional standard deviations of the values
    :param normalisation: if True, normalise the histogram to sum to 1
    :param number_bootstraps: number of bootstrap resamples; if falsy,
        no sampling is done
    :param boundaries: optional (lower, upper) truncation bounds for
        the sampling
    """
    # No sampling possible/requested: missing uncertainties, no
    # bootstraps, or negligible uncertainties.
    # BUG FIX: the original wrote np.fabs(uncertainties < PRECISION),
    # i.e. fabs of a boolean array, which wrongly treats large negative
    # sigmas as negligible; the fabs must wrap the uncertainties.
    # Also guard uncertainties=None, which previously raised TypeError
    # when number_bootstraps was set.
    if (uncertainties is None or not number_bootstraps or
            np.all(np.fabs(uncertainties) < PRECISION)):
        output = hmtk_histogram_1D(values, intervals)
        if normalisation:
            output = output / float(np.sum(output))
        return output
    temp_hist = np.zeros([len(intervals) - 1, number_bootstraps],
                         dtype=float)
    for iloc in range(number_bootstraps):
        sample = sample_truncated_gaussian_vector(
            values, uncertainties, boundaries)
        temp_hist[:, iloc] = hmtk_histogram_1D(sample, intervals)
    output = np.sum(temp_hist, axis=1)
    if normalisation:
        output = output / float(np.sum(output))
    else:
        # average over the bootstrap samples
        output = output / float(number_bootstraps)
    return output
Bootstrap samples a set of vectors
44,475
def bootstrap_histogram_2D(xvalues, yvalues, xbins, ybins,
                           boundaries=(None, None), xsigma=None, ysigma=None,
                           normalisation=False, number_bootstraps=None):
    """
    Calculates a 2D histogram of data, allowing for normalisation and
    bootstrap sampling of the x- and y-uncertainties.

    :param boundaries: pair (x_bounds, y_bounds) of truncation bounds
        passed to the Gaussian sampler (default: unbounded; the default
        is now an immutable tuple instead of a mutable list)
    :param xsigma, ysigma: optional standard deviations of the x and y
        values; if both are None no sampling is done
    :param number_bootstraps: number of bootstrap resamples
    """
    if (xsigma is None and ysigma is None) or not number_bootstraps:
        # no sampling: plain 2D histogram
        output = hmtk_histogram_2D(xvalues, yvalues, bins=(xbins, ybins))
        if normalisation:
            output = output / float(np.sum(output))
        return output
    if xsigma is None:
        xsigma = np.zeros(len(xvalues), dtype=float)
    if ysigma is None:
        ysigma = np.zeros(len(yvalues), dtype=float)
    temp_hist = np.zeros(
        [len(xbins) - 1, len(ybins) - 1, number_bootstraps], dtype=float)
    for iloc in range(number_bootstraps):
        xsample = sample_truncated_gaussian_vector(
            xvalues, xsigma, boundaries[0])
        # BUG FIX: the y-sample must be truncated with the y-boundaries
        # (boundaries[1]); the original reused boundaries[0]
        ysample = sample_truncated_gaussian_vector(
            yvalues, ysigma, boundaries[1])
        temp_hist[:, :, iloc] = hmtk_histogram_2D(
            xsample, ysample, bins=(xbins, ybins))
    output = np.sum(temp_hist, axis=2)
    if normalisation:
        output = output / np.sum(output)
    else:
        output = output / float(number_bootstraps)
    return output
Calculates a 2D histogram of data allowing for normalisation and bootstrap sampling
44,476
def area_of_polygon(polygon):
    """Returns the area of an OpenQuake polygon in square kilometres."""
    # project onto an equal-area (LAEA) plane centred on the polygon
    lon0 = np.mean(polygon.lons)
    lat0 = np.mean(polygon.lats)
    xx, yy = lonlat_to_laea(polygon.lons, polygon.lats, lon0, lat0)
    return geometry.Polygon(zip(xx, yy)).area
Returns the area of an OpenQuake polygon in square kilometres
44,477
def lti(self):
    """Dictionary extended_loss_type -> extended_loss_type index."""
    mapping = {}
    for index, (ltype, _dt) in enumerate(self.loss_dt_list()):
        mapping[ltype] = index
    return mapping
Dictionary extended_loss_type - > extended_loss_type index
44,478
def loss_maps_dt(self, dtype=F32):
    """
    Return a composite data type for loss maps: one field per loss
    type, each containing one 'poe-<poe>' subfield per conditional
    loss poe.
    """
    loss_types = self.loss_dt(dtype).names
    poe_fields = [('poe-%s' % poe, dtype)
                  for poe in self.conditional_loss_poes]
    return numpy.dtype([(ltype, poe_fields) for ltype in loss_types])
Return a composite data type for loss maps
44,479
def gmf_data_dt(self):
    """
    Return a composite data type for the GMFs: realization index, site
    ID, event ID and one ground motion value per IMT.
    """
    num_imts = len(self.imtls)
    return numpy.dtype([('rlzi', U16), ('sid', U32),
                        ('eid', U64), ('gmv', (F32, (num_imts,)))])
Return a composite data type for the GMFs
44,480
def no_imls(self):
    """Return True if there are no intensity measure levels."""
    for levels in self.imtls.values():
        if not numpy.isnan(levels).any():
            return False  # this IMT has real levels
    return True
Return True if there are no intensity measure levels
44,481
def get_kinds(self, kind, R):
    """
    Yield 'rlz-000', 'rlz-001', ..., 'mean', 'quantile-0.1', ...
    depending on the requested kind ('stats', 'rlzs', a specific kind,
    or everything when kind is empty).
    """
    stats = self.hazard_stats()
    if kind == 'stats':
        yield from stats
    elif kind == 'rlzs':
        for r in range(R):
            yield 'rlz-%d' % r
    elif kind:
        yield kind
    else:
        # no explicit kind: individual realizations (when requested or
        # when there are no statistics), then the statistics
        if R > 1 and self.individual_curves or not stats:
            for r in range(R):
                yield 'rlz-%03d' % r
        yield from stats
Yield rlz - 000 rlz - 001 ... mean quantile - 0 . 1 ...
44,482
def hazard_stats(self):
    """
    Return a dictionary name -> statistical function for the hazard
    statistics enabled in the calculation (mean, std, quantiles, max).
    """
    pairs = []
    if self.mean_hazard_curves:
        pairs.append(('mean', stats.mean_curve))
    if self.std_hazard_curves:
        pairs.append(('std', stats.std_curve))
    for q in self.quantiles:
        pairs.append(('quantile-%s' % q,
                      functools.partial(stats.quantile_curve, q)))
    if self.max_hazard_curves:
        pairs.append(('max', stats.max_curve))
    return dict(pairs)
Return a list of item with the statistical functions defined for the hazard calculation
44,483
def is_valid_geometry(self):
    """
    It is possible to infer the geometry only if exactly one of
    sites, sites_csv, hazard_curves_csv, gmfs_csv, region is set.
    You did set more than one, or nothing.
    """
    has_sites = (self.sites is not None or 'sites' in self.inputs
                 or 'site_model' in self.inputs)
    if not has_sites and not self.ground_motion_fields:
        # when generating only the ruptures you do not need the sites
        return True
    if ('gmfs' in self.inputs and not has_sites and
            not self.inputs['gmfs'].endswith('.xml')):
        # a non-XML gmfs file carries no site information of its own
        raise ValueError('Missing sites or sites_csv in the .ini file')
    elif ('risk' in self.calculation_mode or
            'damage' in self.calculation_mode or
            'bcr' in self.calculation_mode):
        # in risk/damage/bcr calculations the sites come from the
        # exposure/hazard inputs, so no geometry check is needed
        return True
    flags = dict(
        sites=bool(self.sites),
        sites_csv=self.inputs.get('sites', 0),
        hazard_curves_csv=self.inputs.get('hazard_curves', 0),
        gmfs_csv=self.inputs.get('gmfs', 0),
        region=bool(self.region and self.region_grid_spacing))
    # exactly one geometry source must be set, unless an exposure or a
    # site model provides the sites
    return sum(bool(v) for v in flags.values()) == 1 or self.inputs.get(
        'exposure') or self.inputs.get('site_model')
It is possible to infer the geometry only if exactly one of sites , sites_csv , hazard_curves_csv , gmfs_csv , region is set . Setting more than one of them , or none , is invalid .
44,484
def is_valid_intensity_measure_types(self):
    """
    If the IMTs and levels are extracted from the risk models they must
    not be set directly; moreover a correlation model only accepts PGA
    and SA intensity measure types.
    """
    if self.ground_motion_correlation_model:
        for imt in self.imtls:
            if imt == 'PGA' or imt.startswith('SA'):
                continue
            raise ValueError(
                'Correlation model %s does not accept IMT=%s' % (
                    self.ground_motion_correlation_model, imt))
    if self.risk_files:  # the IMTLs come from the risk models
        return (self.intensity_measure_types is None and
                self.intensity_measure_types_and_levels is None)
    if not hasattr(self, 'hazard_imtls') and not hasattr(
            self, 'risk_imtls'):
        return False
    return True
If the IMTs and levels are extracted from the risk models they must not be set directly . Moreover if intensity_measure_types_and_levels is set directly intensity_measure_types must not be set .
44,485
def is_valid_intensity_measure_levels(self):
    """
    In order to compute hazard curves, intensity_measure_types_and_levels
    must be set or extracted from the risk models.
    """
    needs_levels = (self.hazard_curves_from_gmfs or
                    self.calculation_mode in (
                        'classical', 'disaggregation'))
    if needs_levels and self.no_imls() and not self.risk_files:
        return False
    return True
In order to compute hazard curves intensity_measure_types_and_levels must be set or extracted from the risk models .
44,486
def is_valid_sites(self):
    """The sites are overdetermined."""
    has_site_model = 'site_model' in self.inputs
    has_sites_csv = 'sites' in self.inputs
    # site_model conflicts with both a sites_csv and inline sites
    if has_site_model and (has_sites_csv or self.sites):
        return False
    # sites_csv conflicts with inline sites
    if has_sites_csv and self.sites:
        return False
    # inline sites conflict with a gridded region
    if self.sites and self.region and self.region_grid_spacing:
        return False
    return True
The sites are overdetermined
44,487
def is_valid_complex_fault_mesh_spacing(self):
    """
    The complex_fault_mesh_spacing parameter can be None only if
    rupture_mesh_spacing is set; in that case it is identified with it.
    """
    rms = getattr(self, 'rupture_mesh_spacing', None)
    cfms = getattr(self, 'complex_fault_mesh_spacing', None)
    if rms and not cfms:
        # default the complex fault mesh spacing to the rupture one
        self.complex_fault_mesh_spacing = self.rupture_mesh_spacing
    return True
The complex_fault_mesh_spacing parameter can be None only if rupture_mesh_spacing is set . In that case it is identified with it .
44,488
def is_valid_optimize_same_id_sources(self):
    """
    The optimize_same_id_sources flag can be true only in the
    classical and disaggregation calculators.
    """
    # NB: the original condition parses as
    # (optimize and 'classical' in mode) or ('disagg' in mode)
    if 'disagg' in self.calculation_mode:
        return True
    if self.optimize_same_id_sources:
        return 'classical' in self.calculation_mode
    return True
The optimize_same_id_sources can be true only in the classical calculators .
44,489
def check_missing(self, param, action):
    """
    Make sure the given parameter is missing in the job.ini file;
    if present, log at the given level or raise InvalidFile.
    """
    assert action in ('debug', 'info', 'warn', 'error'), action
    if not self.inputs.get(param):
        return  # parameter absent: nothing to do
    msg = '%s_file in %s is ignored in %s' % (
        param, self.inputs['job_ini'], self.calculation_mode)
    if action == 'error':
        raise InvalidFile(msg)
    getattr(logging, action)(msg)
Make sure the given parameter is missing in the job . ini file
44,490
def get_set_num_ruptures(src):
    """
    Extract the number of ruptures and set it on the source, counting
    them (and warning if counting is slow) when not already known.
    """
    if src.num_ruptures:
        return src.num_ruptures
    t0 = time.time()
    src.num_ruptures = src.count_ruptures()
    dt = time.time() - t0
    if dt > 10:
        # counting took suspiciously long: hint at the likely parameter
        clsname = src.__class__.__name__
        if 'Area' in clsname:
            logging.warning('%s.count_ruptures took %d seconds, perhaps the '
                            'area discretization is too small', src, dt)
        elif 'ComplexFault' in clsname:
            logging.warning('%s.count_ruptures took %d seconds, perhaps the '
                            'complex_fault_mesh_spacing is too small',
                            src, dt)
        elif 'SimpleFault' in clsname:
            logging.warning('%s.count_ruptures took %d seconds, perhaps the '
                            'rupture_mesh_spacing is too small', src, dt)
        else:
            logging.warning('count_ruptures %s took %d seconds', src, dt)
    return src.num_ruptures
Extract the number of ruptures and set it
44,491
def mfds2multimfd(mfds):
    """
    Convert a list of MFD nodes into a single MultiMFD node.

    :param mfds: non-empty list of MFD nodes, all of the same kind
    :returns: a Node of tag 'multiMFD' collecting the fields of all
        the input MFDs
    """
    # the kind is taken from the (namespaced) tag of the first MFD
    _, kind = mfds[0].tag.split('}')
    node = Node('multiMFD', dict(kind=kind, size=len(mfds)))
    lengths = None
    for field in mfd.multi_mfd.ASSOC[kind][1:]:
        alias = mfd.multi_mfd.ALIAS.get(field, field)
        if field in ('magnitudes', 'occurRates'):
            # ~ extracts the text content of each subnode as a list;
            # the per-MFD lists are flattened, keeping their lengths
            data = [~getattr(m, field) for m in mfds]
            lengths = [len(d) for d in data]
            data = sum(data, [])
        else:
            try:
                data = [m[alias] for m in mfds]
            except KeyError:
                if alias == 'binWidth':
                    # a missing binWidth is acceptable (e.g. GR MFDs)
                    continue
                else:
                    raise
        node.append(Node(field, text=collapse(data)))
        if lengths:
            # record the lengths of the flattened per-MFD lists
            node.append(Node('lengths', text=collapse(lengths)))
    return node
Convert a list of MFD nodes into a single MultiMFD node
44,492
def update(self, src):
    """
    Update the attributes sources, min_mag, max_mag and tot_ruptures
    according to the given source.
    """
    assert src.tectonic_region_type == self.trt, (
        src.tectonic_region_type, self.trt)
    if not src.min_mag:
        # inherit the minimum magnitude for this TRT (or the default)
        src.min_mag = self.min_mag.get(self.trt) or self.min_mag['default']
    # mutually exclusive ruptures require non-parametric sources
    if (not isinstance(src, NonParametricSeismicSource) and
            self.rup_interdep == 'mutex'):
        msg = "Mutually exclusive ruptures can only be "
        msg += "modelled using non-parametric sources"
        raise ValueError(msg)
    nr = get_set_num_ruptures(src)
    if nr == 0:
        return  # nothing to add
    self.tot_ruptures += nr
    self.sources.append(src)
    _, src_max_mag = src.get_min_max_mag()
    if self.max_mag is None or src_max_mag > self.max_mag:
        self.max_mag = src_max_mag
Update the attributes sources min_mag max_mag according to the given source .
44,493
def convert_node(self, node):
    """
    Convert the given rupture node into a hazardlib rupture by
    dispatching on the node tag to the matching convert_* method.
    """
    converter = getattr(self, 'convert_' + striptag(node.tag))
    return converter(node)
Convert the given rupture node into a hazardlib rupture depending on the node tag .
44,494
def convert_simpleFaultRupture(self, node):
    """Convert a simpleFaultRupture node into a BaseRupture."""
    mag, rake, hypocenter = self.get_mag_rake_hypo(node)
    with context(self.fname, node):
        surfaces = [node.simpleFaultGeometry]
        rupture = source.rupture.BaseRupture(
            mag=mag, rake=rake, tectonic_region_type=None,
            hypocenter=hypocenter,
            surface=self.convert_surfaces(surfaces))
    return rupture
Convert a simpleFaultRupture node .
44,495
def convert_multiPlanesRupture(self, node):
    """Convert a multiPlanesRupture node into a BaseRupture."""
    mag, rake, hypocenter = self.get_mag_rake_hypo(node)
    with context(self.fname, node):
        surfaces = list(node.getnodes('planarSurface'))
        rupture = source.rupture.BaseRupture(
            mag=mag, rake=rake, tectonic_region_type=None,
            hypocenter=hypocenter,
            surface=self.convert_surfaces(surfaces))
    return rupture
Convert a multiPlanesRupture node .
44,496
def get_tom(self, node):
    """
    Convert the given node into a Temporal Occurrence Model object,
    defaulting to a PoissonTOM when no 'tom' attribute is present.
    """
    if 'tom' in node.attrib:
        tom_cls = tom.registry[node['tom']]
    else:
        tom_cls = tom.registry['PoissonTOM']
    return tom_cls(time_span=self.investigation_time,
                   occurrence_rate=node.get('occurrence_rate'))
Convert the given node into a Temporal Occurrence Model object .
44,497
def convert_mfdist(self, node):
    """
    Convert the given node into a Magnitude-Frequency Distribution
    object, dispatching on the tag of the single MFD subnode.
    """
    with context(self.fname, node):
        # exactly one MFD subnode is expected
        [mfd_node] = [subnode for subnode in node
                      if subnode.tag.endswith(
                          ('incrementalMFD', 'truncGutenbergRichterMFD',
                           'arbitraryMFD', 'YoungsCoppersmithMFD',
                           'multiMFD'))]
        if mfd_node.tag.endswith('incrementalMFD'):
            return mfd.EvenlyDiscretizedMFD(
                min_mag=mfd_node['minMag'],
                bin_width=mfd_node['binWidth'],
                occurrence_rates=~mfd_node.occurRates)
        elif mfd_node.tag.endswith('truncGutenbergRichterMFD'):
            return mfd.TruncatedGRMFD(
                a_val=mfd_node['aValue'], b_val=mfd_node['bValue'],
                min_mag=mfd_node['minMag'], max_mag=mfd_node['maxMag'],
                bin_width=self.width_of_mfd_bin)
        elif mfd_node.tag.endswith('arbitraryMFD'):
            return mfd.ArbitraryMFD(
                magnitudes=~mfd_node.magnitudes,
                occurrence_rates=~mfd_node.occurRates)
        elif mfd_node.tag.endswith('YoungsCoppersmithMFD'):
            # two alternate constructors depending on which rate
            # attribute is present in the node
            if "totalMomentRate" in mfd_node.attrib.keys():
                return mfd.YoungsCoppersmith1985MFD.from_total_moment_rate(
                    min_mag=mfd_node["minMag"],
                    b_val=mfd_node["bValue"],
                    char_mag=mfd_node["characteristicMag"],
                    total_moment_rate=mfd_node["totalMomentRate"],
                    bin_width=mfd_node["binWidth"])
            elif "characteristicRate" in mfd_node.attrib.keys():
                return mfd.YoungsCoppersmith1985MFD.from_characteristic_rate(
                    min_mag=mfd_node["minMag"],
                    b_val=mfd_node["bValue"],
                    char_mag=mfd_node["characteristicMag"],
                    char_rate=mfd_node["characteristicRate"],
                    bin_width=mfd_node["binWidth"])
        elif mfd_node.tag.endswith('multiMFD'):
            return mfd.multi_mfd.MultiMFD.from_node(
                mfd_node, self.width_of_mfd_bin)
Convert the given node into a Magnitude - Frequency Distribution object .
44,498
def convert_npdist(self, node):
    """Convert the given node into a Nodal Plane Distribution."""
    with context(self.fname, node):
        dist = []
        for np_node in node.nodalPlaneDist:
            plane = geo.NodalPlane(np_node['strike'], np_node['dip'],
                                   np_node['rake'])
            dist.append((np_node['probability'], plane))
        if not self.spinning_floating:
            # keep only the first nodal plane, with full weight
            dist = [(1, dist[0][1])]
        return pmf.PMF(dist)
Convert the given node into a Nodal Plane Distribution .
44,499
def convert_hpdist(self, node):
    """
    Convert the given node into a probability mass function for the
    hypocentral depth distribution.
    """
    with context(self.fname, node):
        pairs = [(hd['probability'], hd['depth'])
                 for hd in node.hypoDepthDist]
        if not self.spinning_floating:
            # keep only the first depth, with full weight
            pairs = [(1, pairs[0][1])]
        return pmf.PMF(pairs)
Convert the given node into a probability mass function for the hypo depth distribution .