idx
int64
0
251k
question
stringlengths
53
3.53k
target
stringlengths
5
1.23k
len_question
int64
20
893
len_target
int64
3
238
232,400
def bootstrap_histogram_2D(xvalues, yvalues, xbins, ybins,
                           boundaries=(None, None), xsigma=None, ysigma=None,
                           normalisation=False, number_bootstraps=None):
    """
    Calculates a 2D histogram of data allowing for normalisation and
    bootstrap sampling.

    :param xvalues: data values on the x-axis
    :param yvalues: data values on the y-axis
    :param xbins: bin edges on the x-axis
    :param ybins: bin edges on the y-axis
    :param boundaries: (x, y) truncation boundaries passed to the
        truncated Gaussian sampler (immutable default to avoid the
        shared-mutable-default pitfall)
    :param xsigma: uncertainties (standard deviations) on the x-values
    :param ysigma: uncertainties (standard deviations) on the y-values
    :param normalisation: if True, normalise the histogram to sum to 1
    :param number_bootstraps: number of bootstrap samples
    :returns: 2D array of (possibly normalised) counts
    """
    if (xsigma is None and ysigma is None) or not number_bootstraps:
        # No sampling - return simple 2-D histogram
        output = hmtk_histogram_2D(xvalues, yvalues, bins=(xbins, ybins))
        if normalisation:
            output = output / float(np.sum(output))
        return output
    # Sampling case: missing sigmas are treated as zero uncertainty
    if xsigma is None:
        xsigma = np.zeros(len(xvalues), dtype=float)
    if ysigma is None:
        ysigma = np.zeros(len(yvalues), dtype=float)
    temp_hist = np.zeros([len(xbins) - 1, len(ybins) - 1, number_bootstraps],
                         dtype=float)
    for iloc in range(number_bootstraps):
        xsample = sample_truncated_gaussian_vector(xvalues, xsigma,
                                                   boundaries[0])
        # BUG FIX: the y-sample previously used boundaries[0] (the x-axis
        # boundaries); it must use the y-axis boundaries.
        ysample = sample_truncated_gaussian_vector(yvalues, ysigma,
                                                   boundaries[1])
        temp_hist[:, :, iloc] = hmtk_histogram_2D(xsample, ysample,
                                                  bins=(xbins, ybins))
    if normalisation:
        output = np.sum(temp_hist, axis=2)
        return output / np.sum(output)
    return np.sum(temp_hist, axis=2) / float(number_bootstraps)
Calculates a 2D histogram of data allowing for normalisation and bootstrap sampling
465
18
232,401
def area_of_polygon(polygon):
    """
    Returns the area of an OpenQuake polygon in square kilometres
    """
    centroid_lon = np.mean(polygon.lons)
    centroid_lat = np.mean(polygon.lats)
    # Project onto a Lambert azimuthal equal-area projection centred on
    # the polygon centroid, so that planar area is meaningful
    x, y = lonlat_to_laea(polygon.lons, polygon.lats,
                          centroid_lon, centroid_lat)
    # Build a shapely polygon and let it compute the planar area
    return geometry.Polygon(zip(x, y)).area
Returns the area of an OpenQuake polygon in square kilometres
103
13
232,402
def lti(self):
    """
    Dictionary extended_loss_type -> extended_loss_type index
    """
    indexed = enumerate(self.loss_dt_list())
    return {ltype: idx for idx, (ltype, _dtype) in indexed}
Dictionary extended_loss_type - > extended_loss_type index
38
15
232,403
def loss_maps_dt(self, dtype=F32):
    """
    Return a composite data type for loss maps

    :param dtype: the scalar dtype of each poe field (default F32)
    """
    loss_types = self.loss_dt(dtype).names
    poe_fields = [('poe-%s' % poe, dtype)
                  for poe in self.conditional_loss_poes]
    return numpy.dtype([(ltype, poe_fields) for ltype in loss_types])
Return a composite data type for loss maps
85
8
232,404
def gmf_data_dt(self):
    """
    Return a composite data type for the GMFs

    The 'gmv' field holds one float per intensity measure type.
    """
    num_imts = len(self.imtls)
    fields = [('rlzi', U16), ('sid', U32), ('eid', U64),
              ('gmv', (F32, (num_imts,)))]
    return numpy.dtype(fields)
Return a composite data type for the GMFs
74
9
232,405
def no_imls(self):
    """
    Return True if there are no intensity measure levels
    """
    # every IMT must contain at least one NaN level for this to be True
    for levels in self.imtls.values():
        if not numpy.isnan(levels).any():
            return False
    return True
Return True if there are no intensity measure levels
37
9
232,406
def get_kinds(self, kind, R):
    """
    Yield 'rlz-000', 'rlz-001', ..., 'mean', 'quantile-0.1', ...

    :param kind: 'stats', 'rlzs', a specific kind, or empty for the default
    :param R: number of realizations
    """
    stats = self.hazard_stats()
    if kind == 'stats':
        yield from stats
    elif kind == 'rlzs':
        yield from ('rlz-%d' % r for r in range(R))
    elif kind:
        yield kind
    else:
        # default: yield stats (and realizations if required)
        if (R > 1 and self.individual_curves) or not stats:
            yield from ('rlz-%03d' % r for r in range(R))
        yield from stats
Yield rlz - 000 rlz - 001 ... mean quantile - 0 . 1 ...
118
22
232,407
def hazard_stats(self):
    """
    Return a dict name -> statistical function for the statistics
    defined for the hazard calculation; each function has signature
    func(values, weights). Insertion order is mean, std, quantiles, max.
    """
    pairs = []  # (name, statistical function) in definition order
    if self.mean_hazard_curves:
        pairs.append(('mean', stats.mean_curve))
    if self.std_hazard_curves:
        pairs.append(('std', stats.std_curve))
    for q in self.quantiles:
        pairs.append(('quantile-%s' % q,
                      functools.partial(stats.quantile_curve, q)))
    if self.max_hazard_curves:
        pairs.append(('max', stats.max_curve))
    return dict(pairs)
Return a list of item with the statistical functions defined for the hazard calculation
178
14
232,408
def is_valid_geometry(self):
    """
    It is possible to infer the geometry only if exactly one of
    sites, sites_csv, hazard_curves_csv, gmfs_csv, region is set.
    You did set more than one, or nothing.
    """
    has_sites = (self.sites is not None or 'sites' in self.inputs
                 or 'site_model' in self.inputs)
    if not has_sites and not self.ground_motion_fields:
        # when generating only the ruptures you do not need the sites
        return True
    if ('gmfs' in self.inputs and not has_sites and
            not self.inputs['gmfs'].endswith('.xml')):
        # a non-XML GMF input (e.g. CSV) carries no site information
        raise ValueError('Missing sites or sites_csv in the .ini file')
    elif ('risk' in self.calculation_mode or
            'damage' in self.calculation_mode or
            'bcr' in self.calculation_mode):
        return True  # no check on the sites for risk
    flags = dict(
        sites=bool(self.sites),
        sites_csv=self.inputs.get('sites', 0),
        hazard_curves_csv=self.inputs.get('hazard_curves', 0),
        gmfs_csv=self.inputs.get('gmfs', 0),
        region=bool(self.region and self.region_grid_spacing))
    # NB: below we check that all the flags are mutually exclusive;
    # an exposure or site_model input is also accepted as a geometry source
    return sum(bool(v) for v in flags.values()) == 1 or self.inputs.get(
        'exposure') or self.inputs.get('site_model')
It is possible to infer the geometry only if exactly one of sites sites_csv hazard_curves_csv gmfs_csv region is set . You did set more than one or nothing .
319
40
232,409
def is_valid_intensity_measure_types(self):
    """
    If the IMTs and levels are extracted from the risk models, they
    must not be set directly. Moreover, if
    intensity_measure_types_and_levels is set directly,
    intensity_measure_types must not be set.
    """
    if self.ground_motion_correlation_model:
        # correlation models only support PGA and SA
        for imt in self.imtls:
            supported = imt == 'PGA' or imt.startswith('SA')
            if not supported:
                raise ValueError(
                    'Correlation model %s does not accept IMT=%s' % (
                        self.ground_motion_correlation_model, imt))
    if self.risk_files:  # IMTLs extracted from the risk files
        return (self.intensity_measure_types is None and
                self.intensity_measure_types_and_levels is None)
    if not hasattr(self, 'hazard_imtls') and not hasattr(
            self, 'risk_imtls'):
        return False
    return True
If the IMTs and levels are extracted from the risk models they must not be set directly . Moreover if intensity_measure_types_and_levels is set directly intensity_measure_types must not be set .
177
45
232,410
def is_valid_intensity_measure_levels(self):
    """
    In order to compute hazard curves, intensity_measure_types_and_levels
    must be set or extracted from the risk models.
    """
    curves_required = (self.hazard_curves_from_gmfs or
                       self.calculation_mode in ('classical',
                                                 'disaggregation'))
    # invalid when curves are required but no IMLs are available anywhere
    invalid = self.no_imls() and not self.risk_files and curves_required
    return not invalid
In order to compute hazard curves intensity_measure_types_and_levels must be set or extracted from the risk models .
68
26
232,411
def is_valid_sites(self):
    """
    The sites are overdetermined
    """
    has_site_model = 'site_model' in self.inputs
    has_sites_csv = 'sites' in self.inputs
    # any two of (site_model, sites_csv, explicit sites) together are
    # an over-specification; so is explicit sites plus a gridded region
    if has_site_model and has_sites_csv:
        return False
    if has_site_model and self.sites:
        return False
    if has_sites_csv and self.sites:
        return False
    if self.sites and self.region and self.region_grid_spacing:
        return False
    return True
The sites are overdetermined
90
5
232,412
def is_valid_complex_fault_mesh_spacing(self):
    """
    The complex_fault_mesh_spacing parameter can be None only if
    rupture_mesh_spacing is set. In that case it is identified with it.
    """
    rupture_spacing = getattr(self, 'rupture_mesh_spacing', None)
    complex_spacing = getattr(self, 'complex_fault_mesh_spacing', None)
    if rupture_spacing and not complex_spacing:
        # default the complex fault spacing to the rupture mesh spacing
        self.complex_fault_mesh_spacing = self.rupture_mesh_spacing
    return True
The complex_fault_mesh_spacing parameter can be None only if rupture_mesh_spacing is set . In that case it is identified with it .
90
36
232,413
def is_valid_optimize_same_id_sources(self):
    """
    The optimize_same_id_sources can be true only in the classical
    calculators.
    """
    # same operator precedence as the original expression:
    # (flag and classical) or disagg
    classical_like = ((self.optimize_same_id_sources and
                       'classical' in self.calculation_mode) or
                      'disagg' in self.calculation_mode)
    if classical_like:
        return True
    # otherwise the flag must not be set
    return not self.optimize_same_id_sources
The optimize_same_id_sources can be true only in the classical calculators .
75
19
232,414
def check_missing(self, param, action):
    """
    Make sure the given parameter is missing in the job.ini file

    :param param: name of the input parameter to check
    :param action: one of 'debug', 'info', 'warn', 'error'
    :raises InvalidFile: if the parameter is present and action is 'error'
    """
    assert action in ('debug', 'info', 'warn', 'error'), action
    if not self.inputs.get(param):
        return  # nothing to report
    msg = '%s_file in %s is ignored in %s' % (
        param, self.inputs['job_ini'], self.calculation_mode)
    if action == 'error':
        raise InvalidFile(msg)
    getattr(logging, action)(msg)
Make sure the given parameter is missing in the job . ini file
106
14
232,415
def get_set_num_ruptures(src):
    """
    Extract the number of ruptures and set it

    :param src: a source object with .num_ruptures and .count_ruptures()
    :returns: the (possibly freshly computed) number of ruptures
    """
    if src.num_ruptures:
        return src.num_ruptures
    start = time.time()
    src.num_ruptures = src.count_ruptures()
    elapsed = time.time() - start
    clsname = src.__class__.__name__
    if elapsed > 10:
        # point at the configuration parameter most likely responsible
        # for the slow count, depending on the source class
        if 'Area' in clsname:
            logging.warning('%s.count_ruptures took %d seconds, perhaps the '
                            'area discretization is too small', src, elapsed)
        elif 'ComplexFault' in clsname:
            logging.warning('%s.count_ruptures took %d seconds, perhaps the '
                            'complex_fault_mesh_spacing is too small',
                            src, elapsed)
        elif 'SimpleFault' in clsname:
            logging.warning('%s.count_ruptures took %d seconds, perhaps the '
                            'rupture_mesh_spacing is too small', src, elapsed)
        else:  # multiPointSource
            logging.warning('count_ruptures %s took %d seconds', src, elapsed)
    return src.num_ruptures
Extract the number of ruptures and set it
259
10
232,416
def mfds2multimfd(mfds):
    """
    Convert a list of MFD nodes into a single MultiMFD node

    :param mfds: a non-empty list of MFD nodes of the same kind
    :returns: a Node with tag 'multiMFD'
    """
    # the kind is taken from the first node's tag, after the XML namespace
    _, kind = mfds[0].tag.split('}')
    node = Node('multiMFD', dict(kind=kind, size=len(mfds)))
    lengths = None
    for field in mfd.multi_mfd.ASSOC[kind][1:]:
        alias = mfd.multi_mfd.ALIAS.get(field, field)
        if field in ('magnitudes', 'occurRates'):
            # per-MFD vectors are flattened; 'lengths' records the split
            data = [~getattr(m, field) for m in mfds]
            lengths = [len(d) for d in data]
            data = sum(data, [])  # the list of lists is flattened
        else:
            try:
                data = [m[alias] for m in mfds]
            except KeyError:
                if alias == 'binWidth':
                    # missing binWidth in GR MFDs is ok
                    continue
                else:
                    raise
        node.append(Node(field, text=collapse(data)))
    if lengths:  # this is the last field if present
        node.append(Node('lengths', text=collapse(lengths)))
    return node
Convert a list of MFD nodes into a single MultiMFD node
256
15
232,417
def update(self, src):
    """
    Update the attributes sources, min_mag, max_mag according to the
    given source.

    :param src: a hazardlib source object belonging to this group's
        tectonic region type
    :raises ValueError: if the group has mutually exclusive ruptures but
        the source is not non-parametric
    """
    assert src.tectonic_region_type == self.trt, (
        src.tectonic_region_type, self.trt)
    if not src.min_mag:  # if not set already
        src.min_mag = self.min_mag.get(self.trt) or self.min_mag['default']
    # checking mutex ruptures
    if (not isinstance(src, NonParametricSeismicSource) and
            self.rup_interdep == 'mutex'):
        msg = "Mutually exclusive ruptures can only be "
        msg += "modelled using non-parametric sources"
        raise ValueError(msg)
    nr = get_set_num_ruptures(src)
    if nr == 0:  # the minimum_magnitude filters all ruptures
        return
    self.tot_ruptures += nr
    self.sources.append(src)
    # keep track of the maximum magnitude seen across the group
    _, max_mag = src.get_min_max_mag()
    prev_max_mag = self.max_mag
    if prev_max_mag is None or max_mag > prev_max_mag:
        self.max_mag = max_mag
Update the attributes sources min_mag max_mag according to the given source .
255
16
232,418
def convert_node(self, node):
    """
    Convert the given rupture node into a hazardlib rupture, depending
    on the node tag.
    """
    # dispatch on the node tag: convert_<tag>(node)
    converter = getattr(self, 'convert_' + striptag(node.tag))
    return converter(node)
Convert the given rupture node into a hazardlib rupture depending on the node tag .
37
17
232,419
def convert_simpleFaultRupture(self, node):
    """
    Convert a simpleFaultRupture node.

    :param node: the rupture node to convert
    :returns: a BaseRupture instance
    """
    mag, rake, hypocenter = self.get_mag_rake_hypo(node)
    with context(self.fname, node):
        geom_nodes = [node.simpleFaultGeometry]
        rupture = source.rupture.BaseRupture(
            mag=mag, rake=rake, tectonic_region_type=None,
            hypocenter=hypocenter,
            surface=self.convert_surfaces(geom_nodes))
    return rupture
Convert a simpleFaultRupture node .
106
11
232,420
def convert_multiPlanesRupture(self, node):
    """
    Convert a multiPlanesRupture node.

    :param node: the rupture node to convert
    :returns: a BaseRupture instance
    """
    mag, rake, hypocenter = self.get_mag_rake_hypo(node)
    with context(self.fname, node):
        plane_nodes = list(node.getnodes('planarSurface'))
        rupture = source.rupture.BaseRupture(
            mag=mag, rake=rake, tectonic_region_type=None,
            hypocenter=hypocenter,
            surface=self.convert_surfaces(plane_nodes))
    return rupture
Convert a multiPlanesRupture node .
113
11
232,421
def get_tom(self, node):
    """
    Convert the given node into a Temporal Occurrence Model object.

    Defaults to PoissonTOM when the node has no 'tom' attribute.
    """
    registry_key = node['tom'] if 'tom' in node.attrib else 'PoissonTOM'
    tom_cls = tom.registry[registry_key]
    return tom_cls(time_span=self.investigation_time,
                   occurrence_rate=node.get('occurrence_rate'))
Convert the given node into a Temporal Occurrence Model object .
86
14
232,422
def convert_mfdist(self, node):
    """
    Convert the given node into a Magnitude-Frequency Distribution
    object.

    :param node: a source node containing exactly one MFD subnode
    :returns: a hazardlib MFD instance
    """
    with context(self.fname, node):
        # exactly one MFD subnode is expected; the unpacking raises
        # otherwise
        [mfd_node] = [subnode for subnode in node
                      if subnode.tag.endswith(
                          ('incrementalMFD', 'truncGutenbergRichterMFD',
                           'arbitraryMFD', 'YoungsCoppersmithMFD',
                           'multiMFD'))]
        if mfd_node.tag.endswith('incrementalMFD'):
            return mfd.EvenlyDiscretizedMFD(
                min_mag=mfd_node['minMag'],
                bin_width=mfd_node['binWidth'],
                occurrence_rates=~mfd_node.occurRates)
        elif mfd_node.tag.endswith('truncGutenbergRichterMFD'):
            return mfd.TruncatedGRMFD(
                a_val=mfd_node['aValue'], b_val=mfd_node['bValue'],
                min_mag=mfd_node['minMag'], max_mag=mfd_node['maxMag'],
                bin_width=self.width_of_mfd_bin)
        elif mfd_node.tag.endswith('arbitraryMFD'):
            return mfd.ArbitraryMFD(
                magnitudes=~mfd_node.magnitudes,
                occurrence_rates=~mfd_node.occurRates)
        elif mfd_node.tag.endswith('YoungsCoppersmithMFD'):
            if "totalMomentRate" in mfd_node.attrib.keys():
                # Return Youngs & Coppersmith from the total moment rate
                return mfd.YoungsCoppersmith1985MFD.from_total_moment_rate(
                    min_mag=mfd_node["minMag"], b_val=mfd_node["bValue"],
                    char_mag=mfd_node["characteristicMag"],
                    total_moment_rate=mfd_node["totalMomentRate"],
                    bin_width=mfd_node["binWidth"])
            elif "characteristicRate" in mfd_node.attrib.keys():
                # Return Youngs & Coppersmith from the characteristic rate
                return mfd.YoungsCoppersmith1985MFD.from_characteristic_rate(
                    min_mag=mfd_node["minMag"], b_val=mfd_node["bValue"],
                    char_mag=mfd_node["characteristicMag"],
                    char_rate=mfd_node["characteristicRate"],
                    bin_width=mfd_node["binWidth"])
            # NOTE(review): a YoungsCoppersmithMFD node with neither
            # totalMomentRate nor characteristicRate falls through and
            # the function returns None -- confirm this is intended
        elif mfd_node.tag.endswith('multiMFD'):
            return mfd.multi_mfd.MultiMFD.from_node(
                mfd_node, self.width_of_mfd_bin)
Convert the given node into a Magnitude - Frequency Distribution object .
680
14
232,423
def convert_npdist(self, node):
    """
    Convert the given node into a Nodal Plane Distribution.

    :returns: a PMF of (probability, NodalPlane) pairs
    """
    with context(self.fname, node):
        pairs = []
        for np_node in node.nodalPlaneDist:
            plane = geo.NodalPlane(np_node['strike'], np_node['dip'],
                                   np_node['rake'])
            pairs.append((np_node['probability'], plane))
        if not self.spinning_floating:
            # consider only the first nodal plane
            pairs = [(1, pairs[0][1])]
        return pmf.PMF(pairs)
Convert the given node into a Nodal Plane Distribution .
145
13
232,424
def convert_hpdist(self, node):
    """
    Convert the given node into a probability mass function for the
    hypo depth distribution.
    """
    with context(self.fname, node):
        pairs = [(hd_node['probability'], hd_node['depth'])
                 for hd_node in node.hypoDepthDist]
        if not self.spinning_floating:
            pairs = [(1, pairs[0][1])]  # consider the first hypocenter
        return pmf.PMF(pairs)
Convert the given node into a probability mass function for the hypo depth distribution .
102
17
232,425
def convert_areaSource(self, node):
    """
    Convert the given node into an area source object.

    :param node: a node with tag areaGeometry
    :returns: a source.AreaSource instance
    :raises ValueError: if no discretization is available either on the
        node or in the job.ini
    """
    geom = node.areaGeometry
    coords = split_coords_2d(~geom.Polygon.exterior.LinearRing.posList)
    polygon = geo.Polygon([geo.Point(*xy) for xy in coords])
    msr = valid.SCALEREL[~node.magScaleRel]()
    # the node-level discretization takes precedence over the global one
    area_discretization = geom.attrib.get(
        'discretization', self.area_source_discretization)
    if area_discretization is None:
        raise ValueError(
            'The source %r has no `discretization` parameter and the job.'
            'ini file has no `area_source_discretization` parameter either'
            % node['id'])
    return source.AreaSource(
        source_id=node['id'],
        name=node['name'],
        tectonic_region_type=node.attrib.get('tectonicRegion'),
        mfd=self.convert_mfdist(node),
        rupture_mesh_spacing=self.rupture_mesh_spacing,
        magnitude_scaling_relationship=msr,
        rupture_aspect_ratio=~node.ruptAspectRatio,
        upper_seismogenic_depth=~geom.upperSeismoDepth,
        lower_seismogenic_depth=~geom.lowerSeismoDepth,
        nodal_plane_distribution=self.convert_npdist(node),
        hypocenter_distribution=self.convert_hpdist(node),
        polygon=polygon,
        area_discretization=area_discretization,
        temporal_occurrence_model=self.get_tom(node))
Convert the given node into an area source object .
385
11
232,426
def convert_pointSource(self, node):
    """
    Convert the given node into a point source object.

    :param node: a node with tag pointGeometry
    :returns: a source.PointSource instance
    """
    geom = node.pointGeometry
    lon_lat = ~geom.Point.pos
    msr = valid.SCALEREL[~node.magScaleRel]()
    params = dict(
        source_id=node['id'],
        name=node['name'],
        tectonic_region_type=node.attrib.get('tectonicRegion'),
        mfd=self.convert_mfdist(node),
        rupture_mesh_spacing=self.rupture_mesh_spacing,
        magnitude_scaling_relationship=msr,
        rupture_aspect_ratio=~node.ruptAspectRatio,
        upper_seismogenic_depth=~geom.upperSeismoDepth,
        lower_seismogenic_depth=~geom.lowerSeismoDepth,
        location=geo.Point(*lon_lat),
        nodal_plane_distribution=self.convert_npdist(node),
        hypocenter_distribution=self.convert_hpdist(node),
        temporal_occurrence_model=self.get_tom(node))
    return source.PointSource(**params)
Convert the given node into a point source object .
256
11
232,427
def convert_multiPointSource(self, node):
    """
    Convert the given node into a MultiPointSource object.

    :param node: a node with tag multiPointGeometry
    :returns: a source.MultiPointSource instance
    """
    geom = node.multiPointGeometry
    lons, lats = zip(*split_coords_2d(~geom.posList))
    msr = valid.SCALEREL[~node.magScaleRel]()
    return source.MultiPointSource(
        source_id=node['id'],
        name=node['name'],
        tectonic_region_type=node.attrib.get('tectonicRegion'),
        mfd=self.convert_mfdist(node),
        magnitude_scaling_relationship=msr,
        rupture_aspect_ratio=~node.ruptAspectRatio,
        upper_seismogenic_depth=~geom.upperSeismoDepth,
        lower_seismogenic_depth=~geom.lowerSeismoDepth,
        nodal_plane_distribution=self.convert_npdist(node),
        hypocenter_distribution=self.convert_hpdist(node),
        # the coordinates are stored as 32 bit floats
        mesh=geo.Mesh(F32(lons), F32(lats)),
        temporal_occurrence_model=self.get_tom(node))
Convert the given node into a MultiPointSource object .
262
12
232,428
def convert_simpleFaultSource(self, node):
    """
    Convert the given node into a simple fault object.

    :param node: a node with tag simpleFaultGeometry
    :returns: a source.SimpleFaultSource instance
    """
    geom = node.simpleFaultGeometry
    msr = valid.SCALEREL[~node.magScaleRel]()
    fault_trace = self.geo_line(geom)
    mfd = self.convert_mfdist(node)
    with context(self.fname, node):
        # hypoList and slipList are optional subnodes: a missing
        # attribute on the node raises AttributeError
        try:
            hypo_list = valid.hypo_list(node.hypoList)
        except AttributeError:
            hypo_list = ()
        try:
            slip_list = valid.slip_list(node.slipList)
        except AttributeError:
            slip_list = ()
        simple = source.SimpleFaultSource(
            source_id=node['id'],
            name=node['name'],
            tectonic_region_type=node.attrib.get('tectonicRegion'),
            mfd=mfd,
            rupture_mesh_spacing=self.rupture_mesh_spacing,
            magnitude_scaling_relationship=msr,
            rupture_aspect_ratio=~node.ruptAspectRatio,
            upper_seismogenic_depth=~geom.upperSeismoDepth,
            lower_seismogenic_depth=~geom.lowerSeismoDepth,
            fault_trace=fault_trace,
            dip=~geom.dip,
            rake=~node.rake,
            temporal_occurrence_model=self.get_tom(node),
            hypo_list=hypo_list,
            slip_list=slip_list)
    return simple
Convert the given node into a simple fault object .
336
11
232,429
def convert_complexFaultSource(self, node):
    """
    Convert the given node into a complex fault object.

    :param node: a node with tag complexFaultGeometry
    :returns: a source.ComplexFaultSource instance
    """
    geom = node.complexFaultGeometry
    edges = self.geo_lines(geom)
    mfd = self.convert_mfdist(node)
    msr = valid.SCALEREL[~node.magScaleRel]()
    with context(self.fname, node):
        cmplx = source.ComplexFaultSource(
            source_id=node['id'],
            name=node['name'],
            tectonic_region_type=node.attrib.get('tectonicRegion'),
            mfd=mfd,
            # NB: complex faults use complex_fault_mesh_spacing,
            # not rupture_mesh_spacing
            rupture_mesh_spacing=self.complex_fault_mesh_spacing,
            magnitude_scaling_relationship=msr,
            rupture_aspect_ratio=~node.ruptAspectRatio,
            edges=edges,
            rake=~node.rake,
            temporal_occurrence_model=self.get_tom(node))
    return cmplx
Convert the given node into a complex fault object .
217
11
232,430
def convert_characteristicFaultSource(self, node):
    """
    Convert the given node into a characteristic fault object.

    :param node: a node with a surface subnode
    :returns: a source.CharacteristicFaultSource instance
    """
    params = dict(
        source_id=node['id'],
        name=node['name'],
        tectonic_region_type=node.attrib.get('tectonicRegion'),
        mfd=self.convert_mfdist(node),
        surface=self.convert_surfaces(node.surface),
        rake=~node.rake,
        temporal_occurrence_model=self.get_tom(node))
    return source.CharacteristicFaultSource(**params)
Convert the given node into a characteristic fault object .
120
11
232,431
def convert_nonParametricSeismicSource(self, node):
    """
    Convert the given node into a non parametric source object.

    :param node: a node with tag nonParametricSeismicSource
    :returns: a source.NonParametricSeismicSource instance
    """
    trt = node.attrib.get('tectonicRegion')
    rup_pmf_data = []
    rups_weights = None
    if 'rup_weights' in node.attrib:
        # whitespace-separated list of per-rupture weights
        tmp = node.attrib.get('rup_weights')
        rups_weights = numpy.array([float(s) for s in tmp.split()])
    for i, rupnode in enumerate(node):
        probs = pmf.PMF(valid.pmf(rupnode['probs_occur']))
        # NB: RuptureConverter.convert_node is called explicitly with
        # self as first argument
        rup = RuptureConverter.convert_node(self, rupnode)
        rup.tectonic_region_type = trt
        rup.weight = None if rups_weights is None else rups_weights[i]
        rup_pmf_data.append((rup, probs))
    nps = source.NonParametricSeismicSource(
        node['id'], node['name'], trt, rup_pmf_data)
    # sources with individual rupture weights cannot be split
    nps.splittable = 'rup_weights' not in node.attrib
    return nps
Convert the given node into a non parametric source object .
273
13
232,432
def convert_sourceGroup(self, node):
    """
    Convert the given node into a SourceGroup object.

    :param node: a node with tag sourceGroup
    :returns: a SourceGroup instance
    """
    trt = node['tectonicRegion']
    srcs_weights = node.attrib.get('srcs_weights')
    # group attributes transmitted to the underlying sources, except
    # the purely descriptive/structural ones
    grp_attrs = {k: v for k, v in node.attrib.items()
                 if k not in ('name', 'src_interdep', 'rup_interdep',
                              'srcs_weights')}
    sg = SourceGroup(trt, min_mag=self.minimum_magnitude)
    sg.temporal_occurrence_model = self.get_tom(node)
    sg.name = node.attrib.get('name')
    # Set attributes related to occurrence
    sg.src_interdep = node.attrib.get('src_interdep', 'indep')
    sg.rup_interdep = node.attrib.get('rup_interdep', 'indep')
    sg.grp_probability = node.attrib.get('grp_probability')
    # Set the cluster attribute
    sg.cluster = node.attrib.get('cluster') == 'true'
    # Filter admitted cases
    # 1. The source group is a cluster. In this case the cluster must
    #    have the attributes required to define its occurrence in time.
    if sg.cluster:
        msg = 'A cluster group requires the definition of a temporal'
        msg += ' occurrence model'
        assert 'tom' in node.attrib, msg
        # NOTE(review): `tom` here resolves to the imported module, not
        # a TOM instance, so this isinstance check can never be True and
        # the occurrence_rate assertion looks unreachable -- confirm
        if isinstance(tom, PoissonTOM):
            assert hasattr(sg, 'occurrence_rate')
    #
    for src_node in node:
        if self.source_id and self.source_id != src_node['id']:
            continue  # filter by source_id
        src = self.convert_node(src_node)
        # transmit the group attributes to the underlying source
        for attr, value in grp_attrs.items():
            if attr == 'tectonicRegion':
                # the source TRT, if present, must match the group TRT
                src_trt = src_node.get('tectonicRegion')
                if src_trt and src_trt != trt:
                    with context(self.fname, src_node):
                        raise ValueError('Found %s, expected %s' %
                                         (src_node['tectonicRegion'], trt))
                src.tectonic_region_type = trt
            elif attr == 'grp_probability':
                pass  # do not transmit
            else:  # transmit as it is
                setattr(src, attr, node[attr])
        sg.update(src)
    if srcs_weights is not None:
        # mutex sources: one weight per source is required
        if len(node) and len(srcs_weights) != len(node):
            raise ValueError(
                'There are %d srcs_weights but %d source(s) in %s'
                % (len(srcs_weights), len(node), self.fname))
        for src, sw in zip(sg, srcs_weights):
            src.mutex_weight = sw
    # check that, when the cluster option is set, the group has a temporal
    # occurrence model properly defined
    if sg.cluster and not hasattr(sg, 'temporal_occurrence_model'):
        msg = 'The Source Group is a cluster but does not have a '
        msg += 'temporal occurrence model'
        raise ValueError(msg)
    return sg
Convert the given node into a SourceGroup object .
752
11
232,433
def _check_list_weights ( parameter , name ) : if not isinstance ( parameter , list ) : raise ValueError ( '%s must be formatted with a list of tuples' % name ) weight = np . sum ( [ val [ 1 ] for val in parameter ] ) if fabs ( weight - 1. ) > 1E-8 : raise ValueError ( '%s weights do not sum to 1.0!' % name ) return parameter
Checks that the weights in a list of tuples sums to 1 . 0
96
16
232,434
def build_fault_model(self, collapse=False, rendered_msr=WC1994(),
                      mfd_config=None):
    """
    Constructs a full fault model with epistemic uncertainty by
    enumerating all the possible recurrence models of each fault as
    separate faults, with the recurrence rates multiplied by the
    corresponding weights.

    :param collapse: passed through to the fault recurrence generator
    :param rendered_msr: magnitude scaling relation instance
        -- NOTE(review): a mutable default instance is shared across
        calls; presumably WC1994 is stateless, confirm
    :param mfd_config: optional configuration for the recurrence models
    """
    self.source_model = mtkSourceModel(self.id, self.name)
    for fault in self.faults:
        fault.generate_recurrence_models(collapse, config=mfd_config,
                                         rendered_msr=rendered_msr)
        src_model, src_weight = fault.generate_fault_source_model()
        for iloc, model in enumerate(src_model):
            new_model = deepcopy(model)
            # make the id unique per enumerated recurrence branch
            new_model.id = str(model.id) + '_%g' % (iloc + 1)
            # scale the occurrence rates by the branch weight
            new_model.mfd.occurrence_rates = (
                np.array(new_model.mfd.occurrence_rates) *
                src_weight[iloc]).tolist()
            self.source_model.sources.append(new_model)
Constructs a full fault model with epistemic uncertainty by enumerating all the possible recurrence models of each fault as separate faults with the recurrence rates multiplied by the corresponding weights .
209
36
232,435
def read_file(self, start_year=None, end_year=None, use_centroid=None):
    """
    Reads the ndk file and returns the catalogue.

    :param start_year: explicit catalogue start year; if not given it is
        taken as the minimum centroid year in the file
    :param end_year: explicit catalogue end year; if not given it is
        taken as the maximum centroid year in the file
    :param use_centroid: passed through to to_hmtk
    :returns: the parsed catalogue
    :raises IOError: if the file length is not a multiple of 5 lines
    """
    raw_data = getlines(self.filename)
    num_lines = len(raw_data)
    if ((float(num_lines) / 5.) - float(num_lines / 5)) > 1E-9:
        raise IOError('GCMT represented by 5 lines - number in file not'
                      ' a multiple of 5!')
    self.catalogue.number_gcmts = num_lines // 5
    # Pre-allocates list
    self.catalogue.gcmts = [None] * self.catalogue.number_gcmts
    id0 = 0
    print('Parsing catalogue ...')
    for iloc in range(self.catalogue.number_gcmts):
        self.catalogue.gcmts[iloc] = self.read_ndk_event(raw_data, id0)
        id0 += 5
    print('complete. Contains %s moment tensors'
          % self.catalogue.get_number_tensors())
    if not start_year:
        min_years = [cent.centroid.date.year
                     for cent in self.catalogue.gcmts]
        self.catalogue.start_year = np.min(min_years)
    else:
        # BUG FIX: an explicitly given start_year was previously ignored
        self.catalogue.start_year = start_year
    if not end_year:
        max_years = [cent.centroid.date.year
                     for cent in self.catalogue.gcmts]
        self.catalogue.end_year = np.max(max_years)
    else:
        # BUG FIX: an explicitly given end_year was previously ignored
        self.catalogue.end_year = end_year
    self.to_hmtk(use_centroid)
    return self.catalogue
Reads the file
356
4
232,436
def read_ndk_event ( self , raw_data , id0 ) : gcmt = GCMTEvent ( ) # Get hypocentre ndkstring = raw_data [ id0 ] . rstrip ( '\n' ) gcmt . hypocentre = self . _read_hypocentre_from_ndk_string ( ndkstring ) # GCMT metadata ndkstring = raw_data [ id0 + 1 ] . rstrip ( '\n' ) gcmt = self . _get_metadata_from_ndk_string ( gcmt , ndkstring ) # Get Centroid ndkstring = raw_data [ id0 + 2 ] . rstrip ( '\n' ) gcmt . centroid = self . _read_centroid_from_ndk_string ( ndkstring , gcmt . hypocentre ) # Get Moment Tensor ndkstring = raw_data [ id0 + 3 ] . rstrip ( '\n' ) gcmt . moment_tensor = self . _get_moment_tensor_from_ndk_string ( ndkstring ) # Get principal axes ndkstring = raw_data [ id0 + 4 ] . rstrip ( '\n' ) gcmt . principal_axes = self . _get_principal_axes_from_ndk_string ( ndkstring [ 3 : 48 ] , exponent = gcmt . moment_tensor . exponent ) # Get Nodal Planes gcmt . nodal_planes = self . _get_nodal_planes_from_ndk_string ( ndkstring [ 57 : ] ) # Get Moment and Magnitude gcmt . moment , gcmt . version , gcmt . magnitude = self . _get_moment_from_ndk_string ( ndkstring , gcmt . moment_tensor . exponent ) return gcmt
Reads a 5 - line batch of data into a set of GCMTs
425
16
232,437
def _read_hypocentre_from_ndk_string(self, linestring):
    """
    Reads the hypocentre data from the ndk string to return an
    instance of the GCMTHypocentre class

    :param linestring: line 1 of an ndk 5-line record
    """
    hypo = GCMTHypocentre()
    hypo.source = linestring[0:4]
    hypo.date = _read_date_from_string(linestring[5:15])
    hypo.time = _read_time_from_string(linestring[16:26])
    hypo.latitude = float(linestring[27:33])
    hypo.longitude = float(linestring[34:41])
    hypo.depth = float(linestring[42:47])
    # NOTE(review): split(' ') yields empty strings if the magnitude
    # field ever contains consecutive spaces, which would make float()
    # raise; presumably the ndk format guarantees single spaces here --
    # confirm against the format specification
    magnitudes = [float(x) for x in linestring[48:55].split(' ')]
    # non-positive magnitudes mean "not reported"
    if magnitudes[0] > 0.:
        hypo.m_b = magnitudes[0]
    if magnitudes[1] > 0.:
        hypo.m_s = magnitudes[1]
    hypo.location = linestring[56:]
    return hypo
Reads the hypocentre data from the ndk string to return an instance of the GCMTHypocentre class
226
25
232,438
def _get_metadata_from_ndk_string(self, gcmt, ndk_string):
    """
    Reads the GCMT metadata from line 2 of the ndk batch

    :param gcmt: GCMTEvent instance, updated in place
    :param ndk_string: line 2 of the 5-line ndk record
    :returns: the updated gcmt
    """
    gcmt.identifier = ndk_string[:16]
    # inversion data: body / surface / mantle wave blocks
    inversion_data = re.split('[A-Z:]+', ndk_string[17:61])
    gcmt.metadata['BODY'] = [float(x) for x in inversion_data[1].split()]
    gcmt.metadata['SURFACE'] = [
        float(x) for x in inversion_data[2].split()]
    gcmt.metadata['MANTLE'] = [float(x) for x in inversion_data[3].split()]
    further_meta = re.split('[: ]+', ndk_string[62:])
    gcmt.metadata['CMT'] = int(further_meta[1])
    gcmt.metadata['FUNCTION'] = {'TYPE': further_meta[2],
                                 'DURATION': float(further_meta[3])}
    return gcmt
Reads the GCMT metadata from line 2 of the ndk batch
253
14
232,439
def _get_principal_axes_from_ndk_string(self, ndk_string, exponent):
    """
    Gets the principal axes from the ndk string and returns an instance
    of the GCMTPrincipalAxes class

    :param ndk_string: characters 3:48 of the 5th ndk line
    :param exponent: base-10 exponent applied to the eigenvalues
    """
    axes = GCMTPrincipalAxes()
    scale = 10. ** exponent
    # (attribute, eigenvalue start, eigenvalue end / plunge start,
    #  plunge end / azimuth start, azimuth end) fixed-width slices
    layout = (('t_axis', 0, 8, 11, 15),
              ('b_axis', 15, 23, 26, 30),
              ('p_axis', 30, 38, 41, None))
    for name, eig0, eig1, plunge1, azimuth1 in layout:
        setattr(axes, name, {
            'eigenvalue': scale * float(ndk_string[eig0:eig1]),
            'plunge': float(ndk_string[eig1:plunge1]),
            'azimuth': float(ndk_string[plunge1:azimuth1])})
    return axes
Gets the principal axes from the ndk string and returns an instance of the GCMTPrincipalAxes class
259
23
232,440
def _get_moment_from_ndk_string(self, ndk_string, exponent):
    """
    Returns the moment and the moment magnitude

    :param ndk_string: the 5th ndk line
    :param exponent: base-10 exponent applied to the scalar moment
    :returns: (scalar moment, version string, moment magnitude)
    """
    scalar_moment = float(ndk_string[49:56]) * (10. ** exponent)
    version = ndk_string[:3]
    magnitude = utils.moment_magnitude_scalar(scalar_moment)
    return scalar_moment, version, magnitude
Returns the moment and the moment magnitude
78
7
232,441
def serialise_to_nrml(self, filename, use_defaults=False):
    """
    Writes the source model to a nrml source model file given by the
    filename

    :param filename: path of the output file
    :param use_defaults: whether to use default parameters when
        converting to the oq-hazardlib representation
    """
    oq_source_model = self.convert_to_oqhazardlib(
        PoissonTOM(1.0), 2.0, 2.0, 10.0, use_defaults=use_defaults)
    write_source_model(filename, oq_source_model, name=self.name)
Writes the source model to a nrml source model file given by the filename
86
17
232,442
def input_checks(catalogue, config, completeness):
    """
    Performs a basic set of input checks on the data

    :param catalogue: catalogue with a .data dict ('year', 'magnitude')
    :param config: configuration dict (may be None/empty)
    :param completeness: 2D array [year, magnitude], a float, or None
    :returns: (cmag, ctime, ref_mag, dmag, config)
    """
    if isinstance(completeness, np.ndarray):
        # completeness table is a numpy array (i.e. [year, magnitude])
        if np.shape(completeness)[1] != 2:
            raise ValueError('Completeness Table incorrectly configured')
        cmag = completeness[:, 1]
        ctime = completeness[:, 0]
    elif isinstance(completeness, float):
        # single completeness magnitude applying to the entire catalogue
        cmag = np.array(completeness)
        ctime = np.array(np.min(catalogue.data['year']))
    else:
        # no completeness magnitude given
        cmag = np.array(np.min(catalogue.data['magnitude']))
        ctime = np.array(np.min(catalogue.data['year']))
    if not config:
        # use default reference magnitude of 0.0 and magnitude
        # interval of 0.1
        ref_mag = 0.0
        dmag = 0.1
        config = {'reference_magnitude': None, 'magnitude_interval': 0.1}
    else:
        if config.get('reference_magnitude') is None:
            ref_mag = 0.
            config['reference_magnitude'] = None
        else:
            ref_mag = config['reference_magnitude']
        if not config.get('magnitude_interval'):
            dmag = 0.1
        else:
            dmag = config['magnitude_interval']
    return cmag, ctime, ref_mag, dmag, config
Performs a basic set of input checks on the data
430
11
232,443
def generate_trunc_gr_magnitudes(bval, mmin, mmax, nsamples):
    """
    Generate a random list of magnitudes distributed according to a
    truncated Gutenberg-Richter model

    :param bval: Gutenberg-Richter b-value
    :param mmin: minimum magnitude
    :param mmax: maximum magnitude
    :param nsamples: number of magnitudes to draw
    :returns: array of nsamples magnitudes in [mmin, mmax]
    """
    # inverse-transform sampling of the truncated exponential
    u = np.random.uniform(0., 1., nsamples)
    beta = bval * np.log(10.)
    truncation = 1. - np.exp(-beta * (mmax - mmin))
    return mmin - np.log(1. - u * truncation) / beta
Generate a random list of magnitudes distributed according to a truncated Gutenberg - Richter model
99
19
232,444
def generate_synthetic_magnitudes(aval, bval, mmin, mmax, nyears):
    """
    Generate a synthetic catalogue for a specified number of years, with
    magnitudes distributed according to a truncated Gutenberg-Richter
    distribution.

    :param float aval: a-value of the distribution
    :param float bval: b-value of the distribution
    :param float mmin: minimum magnitude
    :param float mmax: maximum magnitude
    :param int nyears: duration of the catalogue in years
    :returns: dict with 'magnitude' and (sorted) 'year' arrays
    """
    # Expected number of events above mmin over the catalogue duration
    nsamples = int(np.round(nyears * 10. ** (aval - bval * mmin), 0))
    # NB: keep the RNG call order (years first, then magnitudes)
    years = np.sort(np.random.randint(0, nyears, nsamples))
    mags = generate_trunc_gr_magnitudes(bval, mmin, mmax, nsamples)
    return {'magnitude': mags, 'year': years}
Generates a synthetic catalogue for a specified number of years with magnitudes distributed according to a truncated Gutenberg - Richter distribution
125
25
232,445
def downsample_completeness_table(comp_table, sample_width=0.1, mmax=None):
    """
    Re-sample the completeness table to a specified magnitude spacing.

    :param comp_table: completeness table as a 2D array of
        [year, magnitude] rows, ordered with increasing magnitude
    :param float sample_width: magnitude spacing of the resampled table
    :param mmax: optional maximum magnitude; if it exceeds the last
        magnitude in the table, the table is extended up to mmax using
        the final completeness year
    :returns: re-sampled completeness table as a 2D numpy array
    """
    # BUGFIX: the original used an undefined name `d_m` instead of the
    # `sample_width` parameter, raising NameError at runtime.
    new_comp_table = []
    for i in range(comp_table.shape[0] - 1):
        # Fill magnitudes between successive completeness thresholds
        mvals = np.arange(comp_table[i, 1], comp_table[i + 1, 1],
                          sample_width)
        new_comp_table.extend(
            [[comp_table[i, 0], mval] for mval in mvals])
    # If mmax > last magnitude in completeness table, pad with the
    # final completeness year
    if mmax and (mmax > comp_table[-1, 1]):
        new_comp_table.extend(
            [[comp_table[-1, 0], mval]
             for mval in np.arange(comp_table[-1, 1],
                                   mmax + sample_width, sample_width)])
    return np.array(new_comp_table)
Re - sample the completeness table to a specified sample_width
221
13
232,446
def reset ( yes ) : ok = yes or confirm ( 'Do you really want to destroy all your data? (y/n) ' ) if not ok : return dbpath = os . path . realpath ( os . path . expanduser ( config . dbserver . file ) ) # user must be able to access and write the databse file to remove it if os . path . isfile ( dbpath ) and os . access ( dbpath , os . W_OK ) : if dbserver . get_status ( ) == 'running' : if config . dbserver . multi_user : sys . exit ( 'The oq dbserver must be stopped ' 'before proceeding' ) else : pid = logs . dbcmd ( 'getpid' ) os . kill ( pid , signal . SIGTERM ) time . sleep ( .5 ) # give time to stop assert dbserver . get_status ( ) == 'not-running' print ( 'dbserver stopped' ) try : os . remove ( dbpath ) print ( 'Removed %s' % dbpath ) except OSError as exc : print ( exc , file = sys . stderr ) # fast way of removing everything purge_all ( fast = True )
Remove all the datastores and the database of the current user
268
13
232,447
def set_status(db, job_id, status):
    """
    Set the job status ('created', 'submitted', 'executing', 'complete',
    'failed', 'aborted') consistently with the is_running flag.

    :param db: database callable
    :param int job_id: job ID; if negative, the |job_id|-th most recent
        job is updated instead
    :param str status: one of the valid states above
    :returns: number of rows updated (0 if no matching job exists)
    """
    valid = ('created', 'submitted', 'executing', 'complete',
             'aborted', 'failed')
    assert status in valid, status
    # Only 'submitted' and 'executing' count as running
    is_running = 0 if status in ('created', 'complete',
                                 'failed', 'aborted') else 1
    if job_id < 0:
        rows = db('SELECT id FROM job ORDER BY id DESC LIMIT ?x', -job_id)
        if not rows:
            return 0
        job_id = rows[-1].id
    cursor = db('UPDATE job SET status=?x, is_running=?x WHERE id=?x',
                status, is_running, job_id)
    return cursor.rowcount
Set the job status (one of: created, submitted, executing, complete, failed, aborted) consistently with is_running.
176
14
232,448
def create_job(db, datadir):
    """
    Create a job record for the given datadir and return its row ID.
    The job is initially owned by 'openquake' with placeholder fields.
    """
    new_id = get_calc_id(db, datadir) + 1
    fields = {
        'id': new_id,
        'is_running': 1,
        'description': 'just created',
        'user_name': 'openquake',
        'calculation_mode': 'to be set',
        'ds_calc_dir': os.path.join('%s/calc_%s' % (datadir, new_id)),
    }
    return db('INSERT INTO job (?S) VALUES (?X)',
              fields.keys(), fields.values()).lastrowid
Create a job record for the given user and return it.
145
9
232,449
def import_job ( db , calc_id , calc_mode , description , user_name , status , hc_id , datadir ) : job = dict ( id = calc_id , calculation_mode = calc_mode , description = description , user_name = user_name , hazard_calculation_id = hc_id , is_running = 0 , status = status , ds_calc_dir = os . path . join ( '%s/calc_%s' % ( datadir , calc_id ) ) ) db ( 'INSERT INTO job (?S) VALUES (?X)' , job . keys ( ) , job . values ( ) )
Insert a calculation inside the database if calc_id is not taken
151
13
232,450
def get_job(db, job_id, username=None):
    """
    Return the job with the given ID, or None if it is missing.
    A negative job_id selects the |job_id|-th most recent calculation
    (optionally restricted to the given username).
    """
    job_id = int(job_id)
    if job_id > 0:
        filters = dict(id=job_id)
        if username:
            filters['user_name'] = username
        try:
            return db('SELECT * FROM job WHERE ?A', filters, one=True)
        except NotFound:
            return None
    # negative job_id: look back through the most recent jobs
    if username:
        jobs = db('SELECT * FROM job WHERE user_name=?x '
                  'ORDER BY id DESC LIMIT ?x', username, -job_id)
    else:
        jobs = db('SELECT * FROM job ORDER BY id DESC LIMIT ?x', -job_id)
    return jobs[-1] if jobs else None
If job_id is negative return the last calculation of the current user otherwise returns the job_id unchanged .
178
22
232,451
def get_calc_id(db, datadir, job_id=None):
    """
    Return the latest calc_id, the maximum between the IDs found in
    the datastore directory and in the database job sequence.
    """
    calc_ids = datastore.get_calc_ids(datadir)
    calc_id = calc_ids[-1] if calc_ids else 0
    if job_id is None:
        try:
            job_id = db('SELECT seq FROM sqlite_sequence WHERE name="job"',
                        scalar=True)
        except NotFound:
            job_id = 0
    return max(calc_id, job_id)
Return the latest calc_id by looking both at the datastore and the database .
109
18
232,452
def list_calculations ( db , job_type , user_name ) : jobs = db ( 'SELECT *, %s FROM job WHERE user_name=?x ' 'AND job_type=?x ORDER BY start_time' % JOB_TYPE , user_name , job_type ) out = [ ] if len ( jobs ) == 0 : out . append ( 'None' ) else : out . append ( 'job_id | status | start_time | ' ' description' ) for job in jobs : descr = job . description start_time = job . start_time out . append ( '%6d | %10s | %s | %s' % ( job . id , job . status , start_time , descr ) ) return out
Return a summary of past calculations as a list of lines.
167
8
232,453
def create_outputs ( db , job_id , keysize , ds_size ) : rows = [ ( job_id , DISPLAY_NAME . get ( key , key ) , key , size ) for key , size in keysize ] db ( 'UPDATE job SET size_mb=?x WHERE id=?x' , ds_size , job_id ) db . insert ( 'output' , 'oq_job_id display_name ds_key size_mb' . split ( ) , rows )
Build a correspondence between the outputs in the datastore and the ones in the database . Also update the datastore size in the job table .
114
30
232,454
def finish(db, job_id, status):
    """
    Mark the job as finished: clear is_running, set the final status
    and record the stop time (UTC).
    """
    fields = {'is_running': False,
              'status': status,
              'stop_time': datetime.utcnow()}
    db('UPDATE job SET ?D WHERE id=?x', fields, job_id)
Set the job columns is_running status and stop_time .
57
13
232,455
def del_calc ( db , job_id , user ) : job_id = int ( job_id ) dependent = db ( 'SELECT id FROM job WHERE hazard_calculation_id=?x' , job_id ) if dependent : return { "error" : 'Cannot delete calculation %d: there ' 'are calculations ' 'dependent from it: %s' % ( job_id , [ j . id for j in dependent ] ) } try : owner , path = db ( 'SELECT user_name, ds_calc_dir FROM job WHERE id=?x' , job_id , one = True ) except NotFound : return { "error" : 'Cannot delete calculation %d:' ' ID does not exist' % job_id } deleted = db ( 'DELETE FROM job WHERE id=?x AND user_name=?x' , job_id , user ) . rowcount if not deleted : return { "error" : 'Cannot delete calculation %d: it belongs to ' '%s and you are %s' % ( job_id , owner , user ) } # try to delete datastore and associated file # path has typically the form /home/user/oqdata/calc_XXX fname = path + ".hdf5" try : os . remove ( fname ) except OSError as exc : # permission error return { "error" : 'Could not remove %s: %s' % ( fname , exc ) } return { "success" : fname }
Delete a calculation and all associated outputs if possible .
332
10
232,456
def log(db, job_id, timestamp, level, process, message):
    """Append a single log record for the given job to the database."""
    row = (job_id, timestamp, level, process, message)
    db('INSERT INTO log (job_id, timestamp, level, process, message) '
       'VALUES (?X)', row)
Write a log record in the database .
61
8
232,457
def get_log(db, job_id):
    """
    Extract the log records of the given job as a list of formatted
    strings '[time #job_id LEVEL] message'.
    """
    # str(timestamp)[:-4] strips the trailing fractional-second digits
    return ['[%s #%d %s] %s' % (str(rec.timestamp)[:-4], job_id,
                                rec.level, rec.message)
            for rec in db('SELECT * FROM log WHERE job_id=?x ORDER BY id',
                          job_id)]
Extract the logs as a big string
99
8
232,458
def save_performance(db, job_id, records):
    """
    Save in the database the performance information about the given job.

    NB: rec['counts'] may be a numpy.uint64, which is not automatically
    converted into an int on some platforms, so it is converted manually.
    """
    rows = []
    for rec in records:
        rows.append((job_id, rec['operation'], rec['time_sec'],
                     rec['memory_mb'], int(rec['counts'])))
    db.insert('performance',
              'job_id operation time_sec memory_mb counts'.split(), rows)
Save in the database the performance information about the given job .
127
12
232,459
def get_traceback(db, job_id):
    """
    Return the traceback of the given calculation as a list of lines.
    The list is empty if the calculation was successful.
    """
    # strange: understand why the filter returns two lines or zero lines
    rows = db("SELECT * FROM log WHERE job_id=?x AND level='CRITICAL'",
              job_id)
    if not rows:
        return []
    return rows[-1].message.splitlines()
Return the traceback of the given calculation as a list of lines . The list is empty if the calculation was successful .
81
24
232,460
def webui ( cmd , hostport = '127.0.0.1:8800' , skip_browser = False ) : dbpath = os . path . realpath ( os . path . expanduser ( config . dbserver . file ) ) if os . path . isfile ( dbpath ) and not os . access ( dbpath , os . W_OK ) : sys . exit ( 'This command must be run by the proper user: ' 'see the documentation for details' ) if cmd == 'start' : dbserver . ensure_on ( ) # start the dbserver in a subprocess rundjango ( 'runserver' , hostport , skip_browser ) elif cmd in commands : rundjango ( cmd )
start the webui server in foreground or perform other operation on the django application
162
16
232,461
def _get_basic_term ( self , C , rup , dists ) : # Fictitious depth calculation if rup . mag > 5. : c4m = C [ 'c4' ] elif rup . mag > 4. : c4m = C [ 'c4' ] - ( C [ 'c4' ] - 1. ) * ( 5. - rup . mag ) else : c4m = 1. R = np . sqrt ( dists . rrup ** 2. + c4m ** 2. ) # basic form base_term = C [ 'a1' ] * np . ones_like ( dists . rrup ) + C [ 'a17' ] * dists . rrup # equation 2 at page 1030 if rup . mag >= C [ 'm1' ] : base_term += ( C [ 'a5' ] * ( rup . mag - C [ 'm1' ] ) + C [ 'a8' ] * ( 8.5 - rup . mag ) ** 2. + ( C [ 'a2' ] + C [ 'a3' ] * ( rup . mag - C [ 'm1' ] ) ) * np . log ( R ) ) elif rup . mag >= self . CONSTS [ 'm2' ] : base_term += ( C [ 'a4' ] * ( rup . mag - C [ 'm1' ] ) + C [ 'a8' ] * ( 8.5 - rup . mag ) ** 2. + ( C [ 'a2' ] + C [ 'a3' ] * ( rup . mag - C [ 'm1' ] ) ) * np . log ( R ) ) else : base_term += ( C [ 'a4' ] * ( self . CONSTS [ 'm2' ] - C [ 'm1' ] ) + C [ 'a8' ] * ( 8.5 - self . CONSTS [ 'm2' ] ) ** 2. + C [ 'a6' ] * ( rup . mag - self . CONSTS [ 'm2' ] ) + C [ 'a7' ] * ( rup . mag - self . CONSTS [ 'm2' ] ) ** 2. + ( C [ 'a2' ] + C [ 'a3' ] * ( self . CONSTS [ 'm2' ] - C [ 'm1' ] ) ) * np . log ( R ) ) return base_term
Compute and return basic form see page 1030 .
555
11
232,462
def _get_vs30star ( self , vs30 , imt ) : # compute the v1 value (see eq. 9, page 1034) if imt . name == "SA" : t = imt . period if t <= 0.50 : v1 = 1500.0 elif t < 3.0 : v1 = np . exp ( - 0.35 * np . log ( t / 0.5 ) + np . log ( 1500. ) ) else : v1 = 800.0 elif imt . name == "PGA" : v1 = 1500.0 else : # This covers the PGV case v1 = 1500.0 # set the vs30 star value (see eq. 8, page 1034) vs30_star = np . ones_like ( vs30 ) * vs30 vs30_star [ vs30 >= v1 ] = v1 return vs30_star
This computes equations 8 and 9 at page 1034
195
11
232,463
def _get_site_response_term ( self , C , imt , vs30 , sa1180 ) : # vs30 star vs30_star = self . _get_vs30star ( vs30 , imt ) # compute the site term site_resp_term = np . zeros_like ( vs30 ) gt_vlin = vs30 >= C [ 'vlin' ] lw_vlin = vs30 < C [ 'vlin' ] # compute site response term for sites with vs30 greater than vlin vs30_rat = vs30_star / C [ 'vlin' ] site_resp_term [ gt_vlin ] = ( ( C [ 'a10' ] + C [ 'b' ] * self . CONSTS [ 'n' ] ) * np . log ( vs30_rat [ gt_vlin ] ) ) # compute site response term for sites with vs30 lower than vlin site_resp_term [ lw_vlin ] = ( C [ 'a10' ] * np . log ( vs30_rat [ lw_vlin ] ) - C [ 'b' ] * np . log ( sa1180 [ lw_vlin ] + C [ 'c' ] ) + C [ 'b' ] * np . log ( sa1180 [ lw_vlin ] + C [ 'c' ] * vs30_rat [ lw_vlin ] ** self . CONSTS [ 'n' ] ) ) return site_resp_term
Compute and return site response model term see page 1033
334
12
232,464
def _get_hanging_wall_term ( self , C , dists , rup ) : if rup . dip == 90.0 : return np . zeros_like ( dists . rx ) else : Fhw = np . zeros_like ( dists . rx ) Fhw [ dists . rx > 0 ] = 1. # Compute taper t1 T1 = np . ones_like ( dists . rx ) T1 *= 60. / 45. if rup . dip <= 30. else ( 90. - rup . dip ) / 45.0 # Compute taper t2 (eq 12 at page 1039) - a2hw set to 0.2 as # indicated at page 1041 T2 = np . zeros_like ( dists . rx ) a2hw = 0.2 if rup . mag > 6.5 : T2 += ( 1. + a2hw * ( rup . mag - 6.5 ) ) elif rup . mag > 5.5 : T2 += ( 1. + a2hw * ( rup . mag - 6.5 ) - ( 1. - a2hw ) * ( rup . mag - 6.5 ) ** 2 ) else : T2 *= 0. # Compute taper t3 (eq. 13 at page 1039) - r1 and r2 specified at # page 1040 T3 = np . zeros_like ( dists . rx ) r1 = rup . width * np . cos ( np . radians ( rup . dip ) ) r2 = 3. * r1 # idx = dists . rx < r1 T3 [ idx ] = ( np . ones_like ( dists . rx ) [ idx ] * self . CONSTS [ 'h1' ] + self . CONSTS [ 'h2' ] * ( dists . rx [ idx ] / r1 ) + self . CONSTS [ 'h3' ] * ( dists . rx [ idx ] / r1 ) ** 2 ) # idx = ( ( dists . rx >= r1 ) & ( dists . rx <= r2 ) ) T3 [ idx ] = 1. - ( dists . rx [ idx ] - r1 ) / ( r2 - r1 ) # Compute taper t4 (eq. 14 at page 1040) T4 = np . zeros_like ( dists . rx ) # if rup . ztor <= 10. : T4 += ( 1. - rup . ztor ** 2. / 100. ) # Compute T5 (eq 15a at page 1040) - ry1 computed according to # suggestions provided at page 1040 T5 = np . zeros_like ( dists . rx ) ry1 = dists . rx * np . tan ( np . radians ( 20. ) ) # idx = ( dists . ry0 - ry1 ) <= 0.0 T5 [ idx ] = 1. # idx = ( ( ( dists . ry0 - ry1 ) > 0.0 ) & ( ( dists . ry0 - ry1 ) < 5.0 ) ) T5 [ idx ] = 1. - ( dists . ry0 [ idx ] - ry1 [ idx ] ) / 5.0 # Finally, compute the hanging wall term return Fhw * C [ 'a13' ] * T1 * T2 * T3 * T4 * T5
Compute and return hanging wall model term see page 1038 .
788
13
232,465
def _get_top_of_rupture_depth_term ( self , C , imt , rup ) : if rup . ztor >= 20.0 : return C [ 'a15' ] else : return C [ 'a15' ] * rup . ztor / 20.0
Compute and return top of rupture depth term . See paragraph Depth - to - Top of Rupture Model page 1042 .
65
26
232,466
def _get_soil_depth_term ( self , C , z1pt0 , vs30 ) : # Get reference z1pt0 z1ref = self . _get_z1pt0ref ( vs30 ) # Get z1pt0 z10 = copy . deepcopy ( z1pt0 ) # This is used for the calculation of the motion on reference rock idx = z1pt0 < 0 z10 [ idx ] = z1ref [ idx ] factor = np . log ( ( z10 + 0.01 ) / ( z1ref + 0.01 ) ) # Here we use a linear interpolation as suggested in the 'Application # guidelines' at page 1044 # Above 700 m/s the trend is flat, but we extend the Vs30 range to # 6,000 m/s (basically the upper limit for mantle shear wave velocity # on earth) to allow extrapolation without throwing an error. f2 = interpolate . interp1d ( [ 0.0 , 150 , 250 , 400 , 700 , 1000 , 6000 ] , [ C [ 'a43' ] , C [ 'a43' ] , C [ 'a44' ] , C [ 'a45' ] , C [ 'a46' ] , C [ 'a46' ] , C [ 'a46' ] ] , kind = 'linear' ) return f2 ( vs30 ) * factor
Compute and return soil depth term . See page 1042 .
303
13
232,467
def _get_stddevs ( self , C , imt , rup , sites , stddev_types , sa1180 , dists ) : std_intra = self . _get_intra_event_std ( C , rup . mag , sa1180 , sites . vs30 , sites . vs30measured , dists . rrup ) std_inter = self . _get_inter_event_std ( C , rup . mag , sa1180 , sites . vs30 ) stddevs = [ ] for stddev_type in stddev_types : assert stddev_type in self . DEFINED_FOR_STANDARD_DEVIATION_TYPES if stddev_type == const . StdDev . TOTAL : stddevs . append ( np . sqrt ( std_intra ** 2 + std_inter ** 2 ) ) elif stddev_type == const . StdDev . INTRA_EVENT : stddevs . append ( std_intra ) elif stddev_type == const . StdDev . INTER_EVENT : stddevs . append ( std_inter ) return stddevs
Return standard deviations as described in paragraph Equations for standard deviation page 1046 .
266
16
232,468
def _get_intra_event_std ( self , C , mag , sa1180 , vs30 , vs30measured , rrup ) : phi_al = self . _get_phi_al_regional ( C , mag , vs30measured , rrup ) derAmp = self . _get_derivative ( C , sa1180 , vs30 ) phi_amp = 0.4 idx = phi_al < phi_amp if np . any ( idx ) : # In the case of small magnitudes and long periods it is possible # for phi_al to take a value less than phi_amp, which would return # a complex value. According to the GMPE authors in this case # phi_amp should be reduced such that it is fractionally smaller # than phi_al phi_amp = 0.4 * np . ones_like ( phi_al ) phi_amp [ idx ] = 0.99 * phi_al [ idx ] phi_b = np . sqrt ( phi_al ** 2 - phi_amp ** 2 ) phi = np . sqrt ( phi_b ** 2 * ( 1 + derAmp ) ** 2 + phi_amp ** 2 ) return phi
Returns Phi as described at pages 1046 and 1047
282
11
232,469
def _get_derivative ( self , C , sa1180 , vs30 ) : derAmp = np . zeros_like ( vs30 ) n = self . CONSTS [ 'n' ] c = C [ 'c' ] b = C [ 'b' ] idx = vs30 < C [ 'vlin' ] derAmp [ idx ] = ( b * sa1180 [ idx ] * ( - 1. / ( sa1180 [ idx ] + c ) + 1. / ( sa1180 [ idx ] + c * ( vs30 [ idx ] / C [ 'vlin' ] ) ** n ) ) ) return derAmp
Returns equation 30 page 1047
150
6
232,470
def _get_regional_term ( self , C , imt , vs30 , rrup ) : f3 = interpolate . interp1d ( [ 150 , 250 , 350 , 450 , 600 , 850 , 1150 , 2000 ] , [ C [ 'a36' ] , C [ 'a37' ] , C [ 'a38' ] , C [ 'a39' ] , C [ 'a40' ] , C [ 'a41' ] , C [ 'a42' ] , C [ 'a42' ] ] , kind = 'linear' ) return f3 ( vs30 ) + C [ 'a29' ] * rrup
Compute regional term for Japan . See page 1043
143
11
232,471
def gc ( coeff , mag ) : if mag > 6.5 : a1ca = coeff [ 'ua' ] a1cb = coeff [ 'ub' ] a1cc = coeff [ 'uc' ] a1cd = coeff [ 'ud' ] a1ce = coeff [ 'ue' ] a2ca = coeff [ 'ia' ] a2cb = coeff [ 'ib' ] a2cc = coeff [ 'ic' ] a2cd = coeff [ 'id' ] a2ce = coeff [ 'ie' ] else : a1ca = coeff [ 'a' ] a1cb = coeff [ 'b' ] a1cc = coeff [ 'c' ] a1cd = coeff [ 'd' ] a1ce = coeff [ 'e' ] a2ca = coeff [ 'ma' ] a2cb = coeff [ 'mb' ] a2cc = coeff [ 'mc' ] a2cd = coeff [ 'md' ] a2ce = coeff [ 'me' ] return a1ca , a1cb , a1cc , a1cd , a1ce , a2ca , a2cb , a2cc , a2cd , a2ce
Returns the set of coefficients to be used for the calculation of GM as a function of earthquake magnitude
279
19
232,472
def rbf ( ra , coeff , mag ) : a1ca , a1cb , a1cc , a1cd , a1ce , a2ca , a2cb , a2cc , a2cd , a2ce = gc ( coeff , mag ) term1 = a1ca + a1cb * mag + a1cc * np . log ( ra + a1cd * np . exp ( a1ce * mag ) ) term2 = a2ca + a2cb * mag term3 = a2cd * np . exp ( a2ce * mag ) return np . exp ( ( term1 - term2 ) / a2cc ) - term3
Calculate the median ground motion for a given magnitude and distance
147
13
232,473
def fnc ( ra , * args ) : # # epicentral distance repi = args [ 0 ] # # azimuth theta = args [ 1 ] # # magnitude mag = args [ 2 ] # # coefficients coeff = args [ 3 ] # # compute the difference between epicentral distances rb = rbf ( ra , coeff , mag ) t1 = ra ** 2 * ( np . sin ( np . radians ( theta ) ) ) ** 2 t2 = rb ** 2 * ( np . cos ( np . radians ( theta ) ) ) ** 2 xx = ra * rb / ( t1 + t2 ) ** 0.5 return xx - repi
Function used in the minimisation problem .
149
8
232,474
def get_ras ( repi , theta , mag , coeff ) : rx = 100. ras = 200. # # calculate the difference between epicentral distances dff = fnc ( ras , repi , theta , mag , coeff ) while abs ( dff ) > 1e-3 : # update the value of distance computed if dff > 0. : ras = ras - rx else : ras = ras + rx dff = fnc ( ras , repi , theta , mag , coeff ) rx = rx / 2. if rx < 1e-3 : break return ras
Computes equivalent distance
142
4
232,475
def _get_stddevs ( self , C , stddev_types , rup , imt , num_sites ) : stddevs = [ ] for stddev_type in stddev_types : sigma_mean = self . _compute_standard_dev ( rup , imt , C ) sigma_tot = np . sqrt ( ( sigma_mean ** 2 ) + ( C [ 'SigmaReg' ] ** 2 ) ) sigma_tot = np . log10 ( np . exp ( sigma_tot ) ) stddevs . append ( sigma_tot + np . zeros ( num_sites ) ) return stddevs
Return standard deviations as defined in eq . 4 and 5 page 744 based on table 8 page 744 . Eq . 5 yields std dev in natural log so convert to log10
156
37
232,476
def _compute_standard_dev ( self , rup , imt , C ) : sigma_mean = 0. if imt . name in "SA PGA" : psi = - 6.898E-3 else : psi = - 3.054E-5 if rup . mag <= 6.5 : sigma_mean = ( C [ 'c12' ] * rup . mag ) + C [ 'c13' ] elif rup . mag > 6.5 : sigma_mean = ( psi * rup . mag ) + C [ 'c14' ] return sigma_mean
Compute the the standard deviation in terms of magnitude described on page 744 eq . 4
135
18
232,477
def insert ( self , table , columns , rows ) : cursor = self . conn . cursor ( ) if len ( rows ) : templ , _args = match ( 'INSERT INTO ?s (?S) VALUES (?X)' , table , columns , rows [ 0 ] ) cursor . executemany ( templ , rows ) return cursor
Insert several rows with executemany . Return a cursor .
74
12
232,478
def _cluster ( param , tom , imtls , gsims , grp_ids , pmap ) : pmapclu = AccumDict ( { grp_id : ProbabilityMap ( len ( imtls . array ) , len ( gsims ) ) for grp_id in grp_ids } ) # Get temporal occurrence model # Number of occurrences for the cluster first = True for nocc in range ( 0 , 50 ) : # TODO fix this once the occurrence rate will be used just as # an object attribute ocr = tom . occurrence_rate prob_n_occ = tom . get_probability_n_occurrences ( ocr , nocc ) if first : pmapclu = prob_n_occ * ( ~ pmap ) ** nocc first = False else : pmapclu += prob_n_occ * ( ~ pmap ) ** nocc pmap = ~ pmapclu return pmap
Computes the probability map in case of a cluster group
209
11
232,479
def _get_stddevs ( self , C , rup , shape , stddev_types ) : weight = self . _compute_weight_std ( C , rup . mag ) std_intra = weight * C [ "sd1" ] * np . ones ( shape ) std_inter = weight * C [ "sd2" ] * np . ones ( shape ) stddevs = [ ] for stddev_type in stddev_types : assert stddev_type in self . DEFINED_FOR_STANDARD_DEVIATION_TYPES if stddev_type == const . StdDev . TOTAL : stddevs . append ( np . sqrt ( std_intra ** 2. + std_inter ** 2. ) ) elif stddev_type == const . StdDev . INTRA_EVENT : stddevs . append ( std_intra ) elif stddev_type == const . StdDev . INTER_EVENT : stddevs . append ( std_inter ) return stddevs
Return standard deviations as defined in p . 971 .
241
11
232,480
def _compute_weight_std ( self , C , mag ) : if mag < 6.0 : return C [ 'a1' ] elif mag >= 6.0 and mag < 6.5 : return C [ 'a1' ] + ( C [ 'a2' ] - C [ 'a1' ] ) * ( ( mag - 6.0 ) / 0.5 ) else : return C [ 'a2' ]
Common part of equations 8 and 9 page 971 .
95
11
232,481
def _compute_magnitude_scaling_term ( self , C , mag ) : c1 = self . CONSTS [ 'c1' ] if mag <= c1 : return C [ 'b1' ] + C [ 'b2' ] * ( mag - c1 ) + C [ 'b3' ] * ( 8.5 - mag ) ** 2 else : return C [ 'b1' ] + C [ 'b7' ] * ( mag - c1 ) + C [ 'b3' ] * ( 8.5 - mag ) ** 2
Compute and return magnitude scaling term in equation 2 page 970 .
125
13
232,482
def _compute_geometric_decay_term ( self , C , mag , dists ) : c1 = self . CONSTS [ 'c1' ] return ( ( C [ 'b4' ] + C [ 'b5' ] * ( mag - c1 ) ) * np . log ( np . sqrt ( dists . rjb ** 2.0 + C [ 'b6' ] ** 2.0 ) ) )
Compute and return geometric decay term in equation 3 page 970 .
98
13
232,483
def _compute_anelestic_attenuation_term ( self , C , dists ) : f_aat = np . zeros_like ( dists . rjb ) idx = dists . rjb > 80.0 f_aat [ idx ] = C [ "b10" ] * ( dists . rjb [ idx ] - 80.0 ) return f_aat
Compute and return anelastic attenuation term in equation 5 page 970 .
92
16
232,484
def _compute_non_linear_term ( self , C , pga_only , sites ) : Vref = self . CONSTS [ 'Vref' ] Vcon = self . CONSTS [ 'Vcon' ] c = self . CONSTS [ 'c' ] n = self . CONSTS [ 'n' ] lnS = np . zeros_like ( sites . vs30 ) # equation (6a) idx = sites . vs30 < Vref lnS [ idx ] = ( C [ 'sb1' ] * np . log ( sites . vs30 [ idx ] / Vref ) + C [ 'sb2' ] * np . log ( ( pga_only [ idx ] + c * ( sites . vs30 [ idx ] / Vref ) ** n ) / ( ( pga_only [ idx ] + c ) * ( sites . vs30 [ idx ] / Vref ) ** n ) ) ) # equation (6b) idx = sites . vs30 >= Vref new_sites = sites . vs30 [ idx ] new_sites [ new_sites > Vcon ] = Vcon lnS [ idx ] = C [ 'sb1' ] * np . log ( new_sites / Vref ) return lnS
Compute non - linear term equation 6 page 970 .
287
11
232,485
def _compute_mean ( self , C , mag , dists , rake ) : mean = ( self . _compute_magnitude_scaling_term ( C , mag ) + self . _compute_geometric_decay_term ( C , mag , dists ) + self . _compute_faulting_style_term ( C , rake ) + self . _compute_anelestic_attenuation_term ( C , dists ) ) return mean
Compute and return mean value without site conditions that is equations 2 - 5 page 970 .
106
18
232,486
def get_bounding_box ( self , maxdist ) : return utils . get_bounding_box ( [ ps . location for ps in self ] , maxdist )
Bounding box containing all the point sources enlarged by the maximum distance .
38
14
232,487
def _compute_standard_dev ( self , rup , imt , C ) : sigma_mean = 0. if rup . mag <= 7.0 : sigma_mean = ( C [ 'c12' ] * rup . mag ) + C [ 'c13' ] elif rup . mag > 7.0 : sigma_mean = ( - 0.00695 * rup . mag ) + C [ 'c14' ] return sigma_mean
Compute the the standard deviation in terms of magnitude described on p . 1866 eq . 6
105
19
232,488
def get_rate_osr_normal_transform ( self , threshold_moment , id0 ) : # Get normal component e1h_ridge = np . zeros ( np . sum ( id0 ) , dtype = float ) e2h_ridge = self . strain . data [ 'e1h' ] [ id0 ] + self . strain . data [ 'e2h' ] [ id0 ] err_ridge = - ( e1h_ridge + e2h_ridge ) calculated_rate_ridge = self . continuum_seismicity ( threshold_moment , e1h_ridge , e2h_ridge , err_ridge , self . regionalisation [ 'OSRnor' ] ) # Get transform e1h_trans = self . strain . data [ 'e1h' ] [ id0 ] e2h_trans = - e1h_trans err_trans = np . zeros ( np . sum ( id0 ) , dtype = float ) calculated_rate_transform = self . continuum_seismicity ( threshold_moment , e1h_trans , e2h_trans , err_trans , self . regionalisation [ 'OTFmed' ] ) return ( self . regionalisation [ 'OSRnor' ] [ 'adjustment_factor' ] * ( calculated_rate_ridge + calculated_rate_transform ) )
Gets seismicity rate for special case of the ridge condition with spreading and transform component
297
17
232,489
def get_rate_osr_convergent_transform ( self , threshold_moment , id0 ) : # Get convergent component e1h_ocb = self . strain . data [ 'e1h' ] [ id0 ] + self . strain . data [ 'e2h' ] [ id0 ] e2h_ocb = np . zeros ( np . sum ( id0 ) , dtype = float ) err_ocb = - ( e1h_ocb + e2h_ocb ) calculated_rate_ocb = self . continuum_seismicity ( threshold_moment , e1h_ocb , e2h_ocb , err_ocb , self . regionalisation [ 'OCB' ] ) # Get transform e2h_trans = self . strain . data [ 'e2h' ] [ id0 ] e1h_trans = - e2h_trans err_trans = np . zeros ( np . sum ( id0 ) , dtype = float ) calculated_rate_transform = self . continuum_seismicity ( threshold_moment , e1h_trans , e2h_trans , err_trans , self . regionalisation [ 'OTFmed' ] ) return ( self . regionalisation [ 'OSRnor' ] [ 'adjustment_factor' ] * ( calculated_rate_ocb + calculated_rate_transform ) )
Calculates seismicity rate for special case of the ridge condition with convergence and transform
309
17
232,490
def get_median_area(self, mag, rake):
    """
    Calculate the median fault area from magnitude, selecting the
    scaling curve from the rake.
    """
    if rake is None:
        # Average of the strike-slip and dip-slip curves
        return power(10.0, (mag - 4.185))
    if (-45 <= rake <= 45) or (rake >= 135) or (rake <= -135):
        # Strike-slip
        return power(10.0, (mag - 4.18))
    # Dip-slip (thrust or normal), and undefined rake
    return power(10.0, (mag - 4.19))
Calculates median fault area from magnitude .
124
9
232,491
def _get_base_url ( request ) : if request . is_secure ( ) : base_url = 'https://%s' else : base_url = 'http://%s' base_url %= request . META [ 'HTTP_HOST' ] return base_url
Construct a base URL given a request object .
63
9
232,492
def _prepare_job ( request , candidates ) : temp_dir = tempfile . mkdtemp ( ) inifiles = [ ] arch = request . FILES . get ( 'archive' ) if arch is None : # move each file to a new temp dir, using the upload file names, # not the temporary ones for each_file in request . FILES . values ( ) : new_path = os . path . join ( temp_dir , each_file . name ) shutil . move ( each_file . temporary_file_path ( ) , new_path ) if each_file . name in candidates : inifiles . append ( new_path ) return inifiles # else extract the files from the archive into temp_dir return readinput . extract_from_zip ( arch , candidates )
Creates a temporary directory move uploaded files there and select the job file by looking at the candidate names .
173
21
232,493
def ajax_login ( request ) : username = request . POST [ 'username' ] password = request . POST [ 'password' ] user = authenticate ( username = username , password = password ) if user is not None : if user . is_active : login ( request , user ) return HttpResponse ( content = 'Successful login' , content_type = 'text/plain' , status = 200 ) else : return HttpResponse ( content = 'Disabled account' , content_type = 'text/plain' , status = 403 ) else : return HttpResponse ( content = 'Invalid login' , content_type = 'text/plain' , status = 403 )
Accept a POST request to login .
146
7
232,494
def get_available_gsims ( request ) : gsims = list ( gsim . get_available_gsims ( ) ) return HttpResponse ( content = json . dumps ( gsims ) , content_type = JSON )
Return a list of strings with the available GSIMs
51
11
232,495
def validate_nrml ( request ) : xml_text = request . POST . get ( 'xml_text' ) if not xml_text : return HttpResponseBadRequest ( 'Please provide the "xml_text" parameter' ) xml_file = gettemp ( xml_text , suffix = '.xml' ) try : nrml . to_python ( xml_file ) except ExpatError as exc : return _make_response ( error_msg = str ( exc ) , error_line = exc . lineno , valid = False ) except Exception as exc : # get the exception message exc_msg = exc . args [ 0 ] if isinstance ( exc_msg , bytes ) : exc_msg = exc_msg . decode ( 'utf-8' ) # make it a unicode object elif isinstance ( exc_msg , str ) : pass else : # if it is another kind of object, it is not obvious a priori how # to extract the error line from it return _make_response ( error_msg = str ( exc_msg ) , error_line = None , valid = False ) # if the line is not mentioned, the whole message is taken error_msg = exc_msg . split ( ', line' ) [ 0 ] # check if the exc_msg contains a line number indication search_match = re . search ( r'line \d+' , exc_msg ) if search_match : error_line = int ( search_match . group ( 0 ) . split ( ) [ 1 ] ) else : error_line = None return _make_response ( error_msg = error_msg , error_line = error_line , valid = False ) else : return _make_response ( error_msg = None , error_line = None , valid = True )
Leverage oq - risklib to check if a given XML text is a valid NRML
383
20
232,496
def calc_list ( request , id = None ) : # view associated to the endpoints /v1/calc/list and /v1/calc/:id/status base_url = _get_base_url ( request ) calc_data = logs . dbcmd ( 'get_calcs' , request . GET , utils . get_valid_users ( request ) , utils . get_acl_on ( request ) , id ) response_data = [ ] username = psutil . Process ( os . getpid ( ) ) . username ( ) for ( hc_id , owner , status , calculation_mode , is_running , desc , pid , parent_id , size_mb ) in calc_data : url = urlparse . urljoin ( base_url , 'v1/calc/%d' % hc_id ) abortable = False if is_running : try : if psutil . Process ( pid ) . username ( ) == username : abortable = True except psutil . NoSuchProcess : pass response_data . append ( dict ( id = hc_id , owner = owner , calculation_mode = calculation_mode , status = status , is_running = bool ( is_running ) , description = desc , url = url , parent_id = parent_id , abortable = abortable , size_mb = size_mb ) ) # if id is specified the related dictionary is returned instead the list if id is not None : [ response_data ] = response_data return HttpResponse ( content = json . dumps ( response_data ) , content_type = JSON )
Get a list of calculations and report their id status calculation_mode is_running description and a url where more detailed information can be accessed . This is called several times by the Javascript .
349
37
232,497
def calc_abort(request, calc_id):
    """
    Abort the given calculation if it is running; answer with a JSON
    body describing the outcome (HTTP 403 when the requesting user has
    no permission on the job).
    """
    def reply(payload, status=200):
        # every answer of this view is a small JSON document
        return HttpResponse(content=json.dumps(payload),
                            content_type=JSON, status=status)

    job = logs.dbcmd('get_job', calc_id)
    if job is None:
        return reply({'error': 'Unknown job %s' % calc_id})
    if job.status not in ('submitted', 'executing'):
        return reply({'error': 'Job %s is not running' % job.id})
    if not utils.user_has_permission(request, job.user_name):
        return reply({'error': ('User %s has no permission to abort job %s'
                                % (job.user_name, job.id))}, status=403)
    if job.pid:  # is a spawned job
        try:
            os.kill(job.pid, signal.SIGTERM)
        except Exception as exc:
            logging.error(exc)
        else:
            logging.warning('Aborting job %d, pid=%d', job.id, job.pid)
            logs.dbcmd('set_status', job.id, 'aborted')
            return reply({'success': 'Killing job %d' % job.id})
    # NOTE(review): a failed os.kill also ends up here, reporting
    # "PID ... not found" — confirm that message is intended for that case
    return reply({'error': 'PID for job %s not found' % job.id})
Abort the given calculation if it is running
378
9
232,498
def calc_remove(request, calc_id):
    """
    Remove the calculation with the given id; only its owner can do so.
    """
    requester = utils.get_user(request)
    try:
        outcome = logs.dbcmd('del_calc', calc_id, requester)
    except dbapi.NotFound:
        return HttpResponseNotFound()
    if 'success' in outcome:
        return HttpResponse(content=json.dumps(outcome),
                            content_type=JSON, status=200)
    if 'error' in outcome:
        logging.error(outcome['error'])
        return HttpResponse(content=json.dumps(outcome),
                            content_type=JSON, status=403)
    # this is an untrapped server error
    logging.error(outcome)
    return HttpResponse(content=outcome,
                        content_type='text/plain', status=500)
Remove the calculation id
183
4
232,499
def log_to_json(log):
    """Convert a log record into a list of four strings."""
    stamp = log.timestamp.isoformat()
    # 22 chars keeps the timestamp at hundredth-of-a-second resolution
    return [stamp[:22], log.level, log.process, log.message]
Convert a log record into a list of strings
37
10