repo stringlengths 7 55 | path stringlengths 4 127 | func_name stringlengths 1 88 | original_string stringlengths 75 19.8k | language stringclasses 1 value | code stringlengths 75 19.8k | code_tokens listlengths 20 707 | docstring stringlengths 3 17.3k | docstring_tokens listlengths 3 222 | sha stringlengths 40 40 | url stringlengths 87 242 | partition stringclasses 1 value | idx int64 0 252k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
gem/oq-engine | openquake/hazardlib/source/rupture_collection.py | split | def split(src, chunksize=MINWEIGHT):
"""
Split a complex fault source in chunks
"""
for i, block in enumerate(block_splitter(src.iter_ruptures(), chunksize,
key=operator.attrgetter('mag'))):
rup = block[0]
source_id = '%s:%d' % (src.source_id, i)
amfd = mfd.ArbitraryMFD([rup.mag], [rup.mag_occ_rate])
rcs = RuptureCollectionSource(
source_id, src.name, src.tectonic_region_type, amfd, block)
yield rcs | python | def split(src, chunksize=MINWEIGHT):
"""
Split a complex fault source in chunks
"""
for i, block in enumerate(block_splitter(src.iter_ruptures(), chunksize,
key=operator.attrgetter('mag'))):
rup = block[0]
source_id = '%s:%d' % (src.source_id, i)
amfd = mfd.ArbitraryMFD([rup.mag], [rup.mag_occ_rate])
rcs = RuptureCollectionSource(
source_id, src.name, src.tectonic_region_type, amfd, block)
yield rcs | [
"def",
"split",
"(",
"src",
",",
"chunksize",
"=",
"MINWEIGHT",
")",
":",
"for",
"i",
",",
"block",
"in",
"enumerate",
"(",
"block_splitter",
"(",
"src",
".",
"iter_ruptures",
"(",
")",
",",
"chunksize",
",",
"key",
"=",
"operator",
".",
"attrgetter",
"(",
"'mag'",
")",
")",
")",
":",
"rup",
"=",
"block",
"[",
"0",
"]",
"source_id",
"=",
"'%s:%d'",
"%",
"(",
"src",
".",
"source_id",
",",
"i",
")",
"amfd",
"=",
"mfd",
".",
"ArbitraryMFD",
"(",
"[",
"rup",
".",
"mag",
"]",
",",
"[",
"rup",
".",
"mag_occ_rate",
"]",
")",
"rcs",
"=",
"RuptureCollectionSource",
"(",
"source_id",
",",
"src",
".",
"name",
",",
"src",
".",
"tectonic_region_type",
",",
"amfd",
",",
"block",
")",
"yield",
"rcs"
] | Split a complex fault source in chunks | [
"Split",
"a",
"complex",
"fault",
"source",
"in",
"chunks"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/source/rupture_collection.py#L61-L72 | train | 233,000 |
gem/oq-engine | openquake/hazardlib/source/rupture_collection.py | RuptureCollectionSource.get_bounding_box | def get_bounding_box(self, maxdist):
"""
Bounding box containing all the hypocenters, enlarged by the
maximum distance
"""
locations = [rup.hypocenter for rup in self.ruptures]
return get_bounding_box(locations, maxdist) | python | def get_bounding_box(self, maxdist):
"""
Bounding box containing all the hypocenters, enlarged by the
maximum distance
"""
locations = [rup.hypocenter for rup in self.ruptures]
return get_bounding_box(locations, maxdist) | [
"def",
"get_bounding_box",
"(",
"self",
",",
"maxdist",
")",
":",
"locations",
"=",
"[",
"rup",
".",
"hypocenter",
"for",
"rup",
"in",
"self",
".",
"ruptures",
"]",
"return",
"get_bounding_box",
"(",
"locations",
",",
"maxdist",
")"
] | Bounding box containing all the hypocenters, enlarged by the
maximum distance | [
"Bounding",
"box",
"containing",
"all",
"the",
"hypocenters",
"enlarged",
"by",
"the",
"maximum",
"distance"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/source/rupture_collection.py#L52-L58 | train | 233,001 |
gem/oq-engine | openquake/commands/show_attrs.py | show_attrs | def show_attrs(key, calc_id=-1):
"""
Show the attributes of a HDF5 dataset in the datastore.
"""
ds = util.read(calc_id)
try:
attrs = h5py.File.__getitem__(ds.hdf5, key).attrs
except KeyError:
print('%r is not in %s' % (key, ds))
else:
if len(attrs) == 0:
print('%s has no attributes' % key)
for name, value in attrs.items():
print(name, value)
finally:
ds.close() | python | def show_attrs(key, calc_id=-1):
"""
Show the attributes of a HDF5 dataset in the datastore.
"""
ds = util.read(calc_id)
try:
attrs = h5py.File.__getitem__(ds.hdf5, key).attrs
except KeyError:
print('%r is not in %s' % (key, ds))
else:
if len(attrs) == 0:
print('%s has no attributes' % key)
for name, value in attrs.items():
print(name, value)
finally:
ds.close() | [
"def",
"show_attrs",
"(",
"key",
",",
"calc_id",
"=",
"-",
"1",
")",
":",
"ds",
"=",
"util",
".",
"read",
"(",
"calc_id",
")",
"try",
":",
"attrs",
"=",
"h5py",
".",
"File",
".",
"__getitem__",
"(",
"ds",
".",
"hdf5",
",",
"key",
")",
".",
"attrs",
"except",
"KeyError",
":",
"print",
"(",
"'%r is not in %s'",
"%",
"(",
"key",
",",
"ds",
")",
")",
"else",
":",
"if",
"len",
"(",
"attrs",
")",
"==",
"0",
":",
"print",
"(",
"'%s has no attributes'",
"%",
"key",
")",
"for",
"name",
",",
"value",
"in",
"attrs",
".",
"items",
"(",
")",
":",
"print",
"(",
"name",
",",
"value",
")",
"finally",
":",
"ds",
".",
"close",
"(",
")"
] | Show the attributes of a HDF5 dataset in the datastore. | [
"Show",
"the",
"attributes",
"of",
"a",
"HDF5",
"dataset",
"in",
"the",
"datastore",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commands/show_attrs.py#L24-L39 | train | 233,002 |
gem/oq-engine | utils/compare_mean_curves.py | compare_mean_curves | def compare_mean_curves(calc_ref, calc, nsigma=3):
"""
Compare the hazard curves coming from two different calculations.
"""
dstore_ref = datastore.read(calc_ref)
dstore = datastore.read(calc)
imtls = dstore_ref['oqparam'].imtls
if dstore['oqparam'].imtls != imtls:
raise RuntimeError('The IMTs and levels are different between '
'calculation %d and %d' % (calc_ref, calc))
sitecol_ref = dstore_ref['sitecol']
sitecol = dstore['sitecol']
site_id_ref = {(lon, lat): sid for sid, lon, lat in zip(
sitecol_ref.sids, sitecol_ref.lons, sitecol_ref.lats)}
site_id = {(lon, lat): sid for sid, lon, lat in zip(
sitecol.sids, sitecol.lons, sitecol.lats)}
common = set(site_id_ref) & set(site_id)
if not common:
raise RuntimeError('There are no common sites between calculation '
'%d and %d' % (calc_ref, calc))
pmap_ref = PmapGetter(dstore_ref, sids=[site_id_ref[lonlat]
for lonlat in common]).get_mean()
pmap = PmapGetter(dstore, sids=[site_id[lonlat]
for lonlat in common]).get_mean()
for lonlat in common:
mean, std = pmap[site_id[lonlat]].array.T # shape (2, N)
mean_ref, std_ref = pmap_ref[site_id_ref[lonlat]].array.T
err = numpy.sqrt(std**2 + std_ref**2)
for imt in imtls:
sl = imtls(imt)
ok = (numpy.abs(mean[sl] - mean_ref[sl]) < nsigma * err[sl]).all()
if not ok:
md = (numpy.abs(mean[sl] - mean_ref[sl])).max()
plt.title('point=%s, imt=%s, maxdiff=%.2e' % (lonlat, imt, md))
plt.loglog(imtls[imt], mean_ref[sl] + std_ref[sl],
label=str(calc_ref), color='black')
plt.loglog(imtls[imt], mean_ref[sl] - std_ref[sl],
color='black')
plt.loglog(imtls[imt], mean[sl] + std[sl], label=str(calc),
color='red')
plt.loglog(imtls[imt], mean[sl] - std[sl], color='red')
plt.legend()
plt.show() | python | def compare_mean_curves(calc_ref, calc, nsigma=3):
"""
Compare the hazard curves coming from two different calculations.
"""
dstore_ref = datastore.read(calc_ref)
dstore = datastore.read(calc)
imtls = dstore_ref['oqparam'].imtls
if dstore['oqparam'].imtls != imtls:
raise RuntimeError('The IMTs and levels are different between '
'calculation %d and %d' % (calc_ref, calc))
sitecol_ref = dstore_ref['sitecol']
sitecol = dstore['sitecol']
site_id_ref = {(lon, lat): sid for sid, lon, lat in zip(
sitecol_ref.sids, sitecol_ref.lons, sitecol_ref.lats)}
site_id = {(lon, lat): sid for sid, lon, lat in zip(
sitecol.sids, sitecol.lons, sitecol.lats)}
common = set(site_id_ref) & set(site_id)
if not common:
raise RuntimeError('There are no common sites between calculation '
'%d and %d' % (calc_ref, calc))
pmap_ref = PmapGetter(dstore_ref, sids=[site_id_ref[lonlat]
for lonlat in common]).get_mean()
pmap = PmapGetter(dstore, sids=[site_id[lonlat]
for lonlat in common]).get_mean()
for lonlat in common:
mean, std = pmap[site_id[lonlat]].array.T # shape (2, N)
mean_ref, std_ref = pmap_ref[site_id_ref[lonlat]].array.T
err = numpy.sqrt(std**2 + std_ref**2)
for imt in imtls:
sl = imtls(imt)
ok = (numpy.abs(mean[sl] - mean_ref[sl]) < nsigma * err[sl]).all()
if not ok:
md = (numpy.abs(mean[sl] - mean_ref[sl])).max()
plt.title('point=%s, imt=%s, maxdiff=%.2e' % (lonlat, imt, md))
plt.loglog(imtls[imt], mean_ref[sl] + std_ref[sl],
label=str(calc_ref), color='black')
plt.loglog(imtls[imt], mean_ref[sl] - std_ref[sl],
color='black')
plt.loglog(imtls[imt], mean[sl] + std[sl], label=str(calc),
color='red')
plt.loglog(imtls[imt], mean[sl] - std[sl], color='red')
plt.legend()
plt.show() | [
"def",
"compare_mean_curves",
"(",
"calc_ref",
",",
"calc",
",",
"nsigma",
"=",
"3",
")",
":",
"dstore_ref",
"=",
"datastore",
".",
"read",
"(",
"calc_ref",
")",
"dstore",
"=",
"datastore",
".",
"read",
"(",
"calc",
")",
"imtls",
"=",
"dstore_ref",
"[",
"'oqparam'",
"]",
".",
"imtls",
"if",
"dstore",
"[",
"'oqparam'",
"]",
".",
"imtls",
"!=",
"imtls",
":",
"raise",
"RuntimeError",
"(",
"'The IMTs and levels are different between '",
"'calculation %d and %d'",
"%",
"(",
"calc_ref",
",",
"calc",
")",
")",
"sitecol_ref",
"=",
"dstore_ref",
"[",
"'sitecol'",
"]",
"sitecol",
"=",
"dstore",
"[",
"'sitecol'",
"]",
"site_id_ref",
"=",
"{",
"(",
"lon",
",",
"lat",
")",
":",
"sid",
"for",
"sid",
",",
"lon",
",",
"lat",
"in",
"zip",
"(",
"sitecol_ref",
".",
"sids",
",",
"sitecol_ref",
".",
"lons",
",",
"sitecol_ref",
".",
"lats",
")",
"}",
"site_id",
"=",
"{",
"(",
"lon",
",",
"lat",
")",
":",
"sid",
"for",
"sid",
",",
"lon",
",",
"lat",
"in",
"zip",
"(",
"sitecol",
".",
"sids",
",",
"sitecol",
".",
"lons",
",",
"sitecol",
".",
"lats",
")",
"}",
"common",
"=",
"set",
"(",
"site_id_ref",
")",
"&",
"set",
"(",
"site_id",
")",
"if",
"not",
"common",
":",
"raise",
"RuntimeError",
"(",
"'There are no common sites between calculation '",
"'%d and %d'",
"%",
"(",
"calc_ref",
",",
"calc",
")",
")",
"pmap_ref",
"=",
"PmapGetter",
"(",
"dstore_ref",
",",
"sids",
"=",
"[",
"site_id_ref",
"[",
"lonlat",
"]",
"for",
"lonlat",
"in",
"common",
"]",
")",
".",
"get_mean",
"(",
")",
"pmap",
"=",
"PmapGetter",
"(",
"dstore",
",",
"sids",
"=",
"[",
"site_id",
"[",
"lonlat",
"]",
"for",
"lonlat",
"in",
"common",
"]",
")",
".",
"get_mean",
"(",
")",
"for",
"lonlat",
"in",
"common",
":",
"mean",
",",
"std",
"=",
"pmap",
"[",
"site_id",
"[",
"lonlat",
"]",
"]",
".",
"array",
".",
"T",
"# shape (2, N)",
"mean_ref",
",",
"std_ref",
"=",
"pmap_ref",
"[",
"site_id_ref",
"[",
"lonlat",
"]",
"]",
".",
"array",
".",
"T",
"err",
"=",
"numpy",
".",
"sqrt",
"(",
"std",
"**",
"2",
"+",
"std_ref",
"**",
"2",
")",
"for",
"imt",
"in",
"imtls",
":",
"sl",
"=",
"imtls",
"(",
"imt",
")",
"ok",
"=",
"(",
"numpy",
".",
"abs",
"(",
"mean",
"[",
"sl",
"]",
"-",
"mean_ref",
"[",
"sl",
"]",
")",
"<",
"nsigma",
"*",
"err",
"[",
"sl",
"]",
")",
".",
"all",
"(",
")",
"if",
"not",
"ok",
":",
"md",
"=",
"(",
"numpy",
".",
"abs",
"(",
"mean",
"[",
"sl",
"]",
"-",
"mean_ref",
"[",
"sl",
"]",
")",
")",
".",
"max",
"(",
")",
"plt",
".",
"title",
"(",
"'point=%s, imt=%s, maxdiff=%.2e'",
"%",
"(",
"lonlat",
",",
"imt",
",",
"md",
")",
")",
"plt",
".",
"loglog",
"(",
"imtls",
"[",
"imt",
"]",
",",
"mean_ref",
"[",
"sl",
"]",
"+",
"std_ref",
"[",
"sl",
"]",
",",
"label",
"=",
"str",
"(",
"calc_ref",
")",
",",
"color",
"=",
"'black'",
")",
"plt",
".",
"loglog",
"(",
"imtls",
"[",
"imt",
"]",
",",
"mean_ref",
"[",
"sl",
"]",
"-",
"std_ref",
"[",
"sl",
"]",
",",
"color",
"=",
"'black'",
")",
"plt",
".",
"loglog",
"(",
"imtls",
"[",
"imt",
"]",
",",
"mean",
"[",
"sl",
"]",
"+",
"std",
"[",
"sl",
"]",
",",
"label",
"=",
"str",
"(",
"calc",
")",
",",
"color",
"=",
"'red'",
")",
"plt",
".",
"loglog",
"(",
"imtls",
"[",
"imt",
"]",
",",
"mean",
"[",
"sl",
"]",
"-",
"std",
"[",
"sl",
"]",
",",
"color",
"=",
"'red'",
")",
"plt",
".",
"legend",
"(",
")",
"plt",
".",
"show",
"(",
")"
] | Compare the hazard curves coming from two different calculations. | [
"Compare",
"the",
"hazard",
"curves",
"coming",
"from",
"two",
"different",
"calculations",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/utils/compare_mean_curves.py#L27-L69 | train | 233,003 |
gem/oq-engine | openquake/hazardlib/gsim/chiou_youngs_2014.py | ChiouYoungs2014PEER._get_stddevs | def _get_stddevs(self, sites, rup, C, stddev_types, ln_y_ref, exp1, exp2):
"""
Returns the standard deviation, which is fixed at 0.65 for every site
"""
ret = []
for stddev_type in stddev_types:
assert stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES
if stddev_type == const.StdDev.TOTAL:
# eq. 13
ret.append(0.65 * np.ones_like(sites.vs30))
return ret | python | def _get_stddevs(self, sites, rup, C, stddev_types, ln_y_ref, exp1, exp2):
"""
Returns the standard deviation, which is fixed at 0.65 for every site
"""
ret = []
for stddev_type in stddev_types:
assert stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES
if stddev_type == const.StdDev.TOTAL:
# eq. 13
ret.append(0.65 * np.ones_like(sites.vs30))
return ret | [
"def",
"_get_stddevs",
"(",
"self",
",",
"sites",
",",
"rup",
",",
"C",
",",
"stddev_types",
",",
"ln_y_ref",
",",
"exp1",
",",
"exp2",
")",
":",
"ret",
"=",
"[",
"]",
"for",
"stddev_type",
"in",
"stddev_types",
":",
"assert",
"stddev_type",
"in",
"self",
".",
"DEFINED_FOR_STANDARD_DEVIATION_TYPES",
"if",
"stddev_type",
"==",
"const",
".",
"StdDev",
".",
"TOTAL",
":",
"# eq. 13",
"ret",
".",
"append",
"(",
"0.65",
"*",
"np",
".",
"ones_like",
"(",
"sites",
".",
"vs30",
")",
")",
"return",
"ret"
] | Returns the standard deviation, which is fixed at 0.65 for every site | [
"Returns",
"the",
"standard",
"deviation",
"which",
"is",
"fixed",
"at",
"0",
".",
"65",
"for",
"every",
"site"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/chiou_youngs_2014.py#L313-L323 | train | 233,004 |
gem/oq-engine | openquake/risklib/scientific.py | build_imls | def build_imls(ff, continuous_fragility_discretization,
steps_per_interval=0):
"""
Build intensity measure levels from a fragility function. If the function
is continuous, they are produced simply as a linear space between minIML
and maxIML. If the function is discrete, they are generated with a
complex logic depending on the noDamageLimit and the parameter
steps per interval.
:param ff: a fragility function object
:param continuous_fragility_discretization: .ini file parameter
:param steps_per_interval: .ini file parameter
:returns: generated imls
"""
if ff.format == 'discrete':
imls = ff.imls
if ff.nodamage and ff.nodamage < imls[0]:
imls = [ff.nodamage] + imls
if steps_per_interval > 1:
gen_imls = fine_graining(imls, steps_per_interval)
else:
gen_imls = imls
else: # continuous
gen_imls = numpy.linspace(ff.minIML, ff.maxIML,
continuous_fragility_discretization)
return gen_imls | python | def build_imls(ff, continuous_fragility_discretization,
steps_per_interval=0):
"""
Build intensity measure levels from a fragility function. If the function
is continuous, they are produced simply as a linear space between minIML
and maxIML. If the function is discrete, they are generated with a
complex logic depending on the noDamageLimit and the parameter
steps per interval.
:param ff: a fragility function object
:param continuous_fragility_discretization: .ini file parameter
:param steps_per_interval: .ini file parameter
:returns: generated imls
"""
if ff.format == 'discrete':
imls = ff.imls
if ff.nodamage and ff.nodamage < imls[0]:
imls = [ff.nodamage] + imls
if steps_per_interval > 1:
gen_imls = fine_graining(imls, steps_per_interval)
else:
gen_imls = imls
else: # continuous
gen_imls = numpy.linspace(ff.minIML, ff.maxIML,
continuous_fragility_discretization)
return gen_imls | [
"def",
"build_imls",
"(",
"ff",
",",
"continuous_fragility_discretization",
",",
"steps_per_interval",
"=",
"0",
")",
":",
"if",
"ff",
".",
"format",
"==",
"'discrete'",
":",
"imls",
"=",
"ff",
".",
"imls",
"if",
"ff",
".",
"nodamage",
"and",
"ff",
".",
"nodamage",
"<",
"imls",
"[",
"0",
"]",
":",
"imls",
"=",
"[",
"ff",
".",
"nodamage",
"]",
"+",
"imls",
"if",
"steps_per_interval",
">",
"1",
":",
"gen_imls",
"=",
"fine_graining",
"(",
"imls",
",",
"steps_per_interval",
")",
"else",
":",
"gen_imls",
"=",
"imls",
"else",
":",
"# continuous",
"gen_imls",
"=",
"numpy",
".",
"linspace",
"(",
"ff",
".",
"minIML",
",",
"ff",
".",
"maxIML",
",",
"continuous_fragility_discretization",
")",
"return",
"gen_imls"
] | Build intensity measure levels from a fragility function. If the function
is continuous, they are produced simply as a linear space between minIML
and maxIML. If the function is discrete, they are generated with a
complex logic depending on the noDamageLimit and the parameter
steps per interval.
:param ff: a fragility function object
:param continuous_fragility_discretization: .ini file parameter
:param steps_per_interval: .ini file parameter
:returns: generated imls | [
"Build",
"intensity",
"measure",
"levels",
"from",
"a",
"fragility",
"function",
".",
"If",
"the",
"function",
"is",
"continuous",
"they",
"are",
"produced",
"simply",
"as",
"a",
"linear",
"space",
"between",
"minIML",
"and",
"maxIML",
".",
"If",
"the",
"function",
"is",
"discrete",
"they",
"are",
"generated",
"with",
"a",
"complex",
"logic",
"depending",
"on",
"the",
"noDamageLimit",
"and",
"the",
"parameter",
"steps",
"per",
"interval",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/risklib/scientific.py#L665-L690 | train | 233,005 |
gem/oq-engine | openquake/risklib/scientific.py | insured_loss_curve | def insured_loss_curve(curve, deductible, insured_limit):
"""
Compute an insured loss ratio curve given a loss ratio curve
:param curve: an array 2 x R (where R is the curve resolution)
:param float deductible: the deductible limit in fraction form
:param float insured_limit: the insured limit in fraction form
>>> losses = numpy.array([3, 20, 101])
>>> poes = numpy.array([0.9, 0.5, 0.1])
>>> insured_loss_curve(numpy.array([losses, poes]), 5, 100)
array([[ 3. , 20. ],
[ 0.85294118, 0.5 ]])
"""
losses, poes = curve[:, curve[0] <= insured_limit]
limit_poe = interpolate.interp1d(
*curve, bounds_error=False, fill_value=1)(deductible)
return numpy.array([
losses,
numpy.piecewise(poes, [poes > limit_poe], [limit_poe, lambda x: x])]) | python | def insured_loss_curve(curve, deductible, insured_limit):
"""
Compute an insured loss ratio curve given a loss ratio curve
:param curve: an array 2 x R (where R is the curve resolution)
:param float deductible: the deductible limit in fraction form
:param float insured_limit: the insured limit in fraction form
>>> losses = numpy.array([3, 20, 101])
>>> poes = numpy.array([0.9, 0.5, 0.1])
>>> insured_loss_curve(numpy.array([losses, poes]), 5, 100)
array([[ 3. , 20. ],
[ 0.85294118, 0.5 ]])
"""
losses, poes = curve[:, curve[0] <= insured_limit]
limit_poe = interpolate.interp1d(
*curve, bounds_error=False, fill_value=1)(deductible)
return numpy.array([
losses,
numpy.piecewise(poes, [poes > limit_poe], [limit_poe, lambda x: x])]) | [
"def",
"insured_loss_curve",
"(",
"curve",
",",
"deductible",
",",
"insured_limit",
")",
":",
"losses",
",",
"poes",
"=",
"curve",
"[",
":",
",",
"curve",
"[",
"0",
"]",
"<=",
"insured_limit",
"]",
"limit_poe",
"=",
"interpolate",
".",
"interp1d",
"(",
"*",
"curve",
",",
"bounds_error",
"=",
"False",
",",
"fill_value",
"=",
"1",
")",
"(",
"deductible",
")",
"return",
"numpy",
".",
"array",
"(",
"[",
"losses",
",",
"numpy",
".",
"piecewise",
"(",
"poes",
",",
"[",
"poes",
">",
"limit_poe",
"]",
",",
"[",
"limit_poe",
",",
"lambda",
"x",
":",
"x",
"]",
")",
"]",
")"
] | Compute an insured loss ratio curve given a loss ratio curve
:param curve: an array 2 x R (where R is the curve resolution)
:param float deductible: the deductible limit in fraction form
:param float insured_limit: the insured limit in fraction form
>>> losses = numpy.array([3, 20, 101])
>>> poes = numpy.array([0.9, 0.5, 0.1])
>>> insured_loss_curve(numpy.array([losses, poes]), 5, 100)
array([[ 3. , 20. ],
[ 0.85294118, 0.5 ]]) | [
"Compute",
"an",
"insured",
"loss",
"ratio",
"curve",
"given",
"a",
"loss",
"ratio",
"curve"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/risklib/scientific.py#L1097-L1116 | train | 233,006 |
gem/oq-engine | openquake/risklib/scientific.py | bcr | def bcr(eal_original, eal_retrofitted, interest_rate,
asset_life_expectancy, asset_value, retrofitting_cost):
"""
Compute the Benefit-Cost Ratio.
BCR = (EALo - EALr)(1-exp(-r*t))/(r*C)
Where:
* BCR -- Benefit cost ratio
* EALo -- Expected annual loss for original asset
* EALr -- Expected annual loss for retrofitted asset
* r -- Interest rate
* t -- Life expectancy of the asset
* C -- Retrofitting cost
"""
return ((eal_original - eal_retrofitted) * asset_value *
(1 - numpy.exp(- interest_rate * asset_life_expectancy)) /
(interest_rate * retrofitting_cost)) | python | def bcr(eal_original, eal_retrofitted, interest_rate,
asset_life_expectancy, asset_value, retrofitting_cost):
"""
Compute the Benefit-Cost Ratio.
BCR = (EALo - EALr)(1-exp(-r*t))/(r*C)
Where:
* BCR -- Benefit cost ratio
* EALo -- Expected annual loss for original asset
* EALr -- Expected annual loss for retrofitted asset
* r -- Interest rate
* t -- Life expectancy of the asset
* C -- Retrofitting cost
"""
return ((eal_original - eal_retrofitted) * asset_value *
(1 - numpy.exp(- interest_rate * asset_life_expectancy)) /
(interest_rate * retrofitting_cost)) | [
"def",
"bcr",
"(",
"eal_original",
",",
"eal_retrofitted",
",",
"interest_rate",
",",
"asset_life_expectancy",
",",
"asset_value",
",",
"retrofitting_cost",
")",
":",
"return",
"(",
"(",
"eal_original",
"-",
"eal_retrofitted",
")",
"*",
"asset_value",
"*",
"(",
"1",
"-",
"numpy",
".",
"exp",
"(",
"-",
"interest_rate",
"*",
"asset_life_expectancy",
")",
")",
"/",
"(",
"interest_rate",
"*",
"retrofitting_cost",
")",
")"
] | Compute the Benefit-Cost Ratio.
BCR = (EALo - EALr)(1-exp(-r*t))/(r*C)
Where:
* BCR -- Benefit cost ratio
* EALo -- Expected annual loss for original asset
* EALr -- Expected annual loss for retrofitted asset
* r -- Interest rate
* t -- Life expectancy of the asset
* C -- Retrofitting cost | [
"Compute",
"the",
"Benefit",
"-",
"Cost",
"Ratio",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/risklib/scientific.py#L1124-L1142 | train | 233,007 |
gem/oq-engine | openquake/risklib/scientific.py | pairwise_mean | def pairwise_mean(values):
"Averages between a value and the next value in a sequence"
return numpy.array([numpy.mean(pair) for pair in pairwise(values)]) | python | def pairwise_mean(values):
"Averages between a value and the next value in a sequence"
return numpy.array([numpy.mean(pair) for pair in pairwise(values)]) | [
"def",
"pairwise_mean",
"(",
"values",
")",
":",
"return",
"numpy",
".",
"array",
"(",
"[",
"numpy",
".",
"mean",
"(",
"pair",
")",
"for",
"pair",
"in",
"pairwise",
"(",
"values",
")",
"]",
")"
] | Averages between a value and the next value in a sequence | [
"Averages",
"between",
"a",
"value",
"and",
"the",
"next",
"value",
"in",
"a",
"sequence"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/risklib/scientific.py#L1147-L1149 | train | 233,008 |
gem/oq-engine | openquake/risklib/scientific.py | pairwise_diff | def pairwise_diff(values):
"Differences between a value and the next value in a sequence"
return numpy.array([x - y for x, y in pairwise(values)]) | python | def pairwise_diff(values):
"Differences between a value and the next value in a sequence"
return numpy.array([x - y for x, y in pairwise(values)]) | [
"def",
"pairwise_diff",
"(",
"values",
")",
":",
"return",
"numpy",
".",
"array",
"(",
"[",
"x",
"-",
"y",
"for",
"x",
",",
"y",
"in",
"pairwise",
"(",
"values",
")",
"]",
")"
] | Differences between a value and the next value in a sequence | [
"Differences",
"between",
"a",
"value",
"and",
"the",
"next",
"value",
"in",
"a",
"sequence"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/risklib/scientific.py#L1152-L1154 | train | 233,009 |
gem/oq-engine | openquake/risklib/scientific.py | mean_std | def mean_std(fractions):
"""
Given an N x M matrix, returns mean and std computed on the rows,
i.e. two M-dimensional vectors.
"""
n = fractions.shape[0]
if n == 1: # avoid warnings when computing the stddev
return fractions[0], numpy.ones_like(fractions[0]) * numpy.nan
return numpy.mean(fractions, axis=0), numpy.std(fractions, axis=0, ddof=1) | python | def mean_std(fractions):
"""
Given an N x M matrix, returns mean and std computed on the rows,
i.e. two M-dimensional vectors.
"""
n = fractions.shape[0]
if n == 1: # avoid warnings when computing the stddev
return fractions[0], numpy.ones_like(fractions[0]) * numpy.nan
return numpy.mean(fractions, axis=0), numpy.std(fractions, axis=0, ddof=1) | [
"def",
"mean_std",
"(",
"fractions",
")",
":",
"n",
"=",
"fractions",
".",
"shape",
"[",
"0",
"]",
"if",
"n",
"==",
"1",
":",
"# avoid warnings when computing the stddev",
"return",
"fractions",
"[",
"0",
"]",
",",
"numpy",
".",
"ones_like",
"(",
"fractions",
"[",
"0",
"]",
")",
"*",
"numpy",
".",
"nan",
"return",
"numpy",
".",
"mean",
"(",
"fractions",
",",
"axis",
"=",
"0",
")",
",",
"numpy",
".",
"std",
"(",
"fractions",
",",
"axis",
"=",
"0",
",",
"ddof",
"=",
"1",
")"
] | Given an N x M matrix, returns mean and std computed on the rows,
i.e. two M-dimensional vectors. | [
"Given",
"an",
"N",
"x",
"M",
"matrix",
"returns",
"mean",
"and",
"std",
"computed",
"on",
"the",
"rows",
"i",
".",
"e",
".",
"two",
"M",
"-",
"dimensional",
"vectors",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/risklib/scientific.py#L1157-L1165 | train | 233,010 |
gem/oq-engine | openquake/risklib/scientific.py | broadcast | def broadcast(func, composite_array, *args):
"""
Broadcast an array function over a composite array
"""
dic = {}
dtypes = []
for name in composite_array.dtype.names:
dic[name] = func(composite_array[name], *args)
dtypes.append((name, dic[name].dtype))
res = numpy.zeros(dic[name].shape, numpy.dtype(dtypes))
for name in dic:
res[name] = dic[name]
return res | python | def broadcast(func, composite_array, *args):
"""
Broadcast an array function over a composite array
"""
dic = {}
dtypes = []
for name in composite_array.dtype.names:
dic[name] = func(composite_array[name], *args)
dtypes.append((name, dic[name].dtype))
res = numpy.zeros(dic[name].shape, numpy.dtype(dtypes))
for name in dic:
res[name] = dic[name]
return res | [
"def",
"broadcast",
"(",
"func",
",",
"composite_array",
",",
"*",
"args",
")",
":",
"dic",
"=",
"{",
"}",
"dtypes",
"=",
"[",
"]",
"for",
"name",
"in",
"composite_array",
".",
"dtype",
".",
"names",
":",
"dic",
"[",
"name",
"]",
"=",
"func",
"(",
"composite_array",
"[",
"name",
"]",
",",
"*",
"args",
")",
"dtypes",
".",
"append",
"(",
"(",
"name",
",",
"dic",
"[",
"name",
"]",
".",
"dtype",
")",
")",
"res",
"=",
"numpy",
".",
"zeros",
"(",
"dic",
"[",
"name",
"]",
".",
"shape",
",",
"numpy",
".",
"dtype",
"(",
"dtypes",
")",
")",
"for",
"name",
"in",
"dic",
":",
"res",
"[",
"name",
"]",
"=",
"dic",
"[",
"name",
"]",
"return",
"res"
] | Broadcast an array function over a composite array | [
"Broadcast",
"an",
"array",
"function",
"over",
"a",
"composite",
"array"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/risklib/scientific.py#L1184-L1196 | train | 233,011 |
gem/oq-engine | openquake/risklib/scientific.py | average_loss | def average_loss(lc):
"""
Given a loss curve array with `poe` and `loss` fields,
computes the average loss on a period of time.
:note: As the loss curve is supposed to be piecewise linear as it
is a result of a linear interpolation, we compute an exact
integral by using the trapeizodal rule with the width given by the
loss bin width.
"""
losses, poes = (lc['loss'], lc['poe']) if lc.dtype.names else lc
return -pairwise_diff(losses) @ pairwise_mean(poes) | python | def average_loss(lc):
"""
Given a loss curve array with `poe` and `loss` fields,
computes the average loss on a period of time.
:note: As the loss curve is supposed to be piecewise linear as it
is a result of a linear interpolation, we compute an exact
integral by using the trapeizodal rule with the width given by the
loss bin width.
"""
losses, poes = (lc['loss'], lc['poe']) if lc.dtype.names else lc
return -pairwise_diff(losses) @ pairwise_mean(poes) | [
"def",
"average_loss",
"(",
"lc",
")",
":",
"losses",
",",
"poes",
"=",
"(",
"lc",
"[",
"'loss'",
"]",
",",
"lc",
"[",
"'poe'",
"]",
")",
"if",
"lc",
".",
"dtype",
".",
"names",
"else",
"lc",
"return",
"-",
"pairwise_diff",
"(",
"losses",
")",
"@",
"pairwise_mean",
"(",
"poes",
")"
] | Given a loss curve array with `poe` and `loss` fields,
computes the average loss on a period of time.
:note: As the loss curve is supposed to be piecewise linear as it
is a result of a linear interpolation, we compute an exact
integral by using the trapeizodal rule with the width given by the
loss bin width. | [
"Given",
"a",
"loss",
"curve",
"array",
"with",
"poe",
"and",
"loss",
"fields",
"computes",
"the",
"average",
"loss",
"on",
"a",
"period",
"of",
"time",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/risklib/scientific.py#L1200-L1211 | train | 233,012 |
gem/oq-engine | openquake/risklib/scientific.py | normalize_curves_eb | def normalize_curves_eb(curves):
"""
A more sophisticated version of normalize_curves, used in the event
based calculator.
:param curves: a list of pairs (losses, poes)
:returns: first losses, all_poes
"""
# we assume non-decreasing losses, so losses[-1] is the maximum loss
non_zero_curves = [(losses, poes)
for losses, poes in curves if losses[-1] > 0]
if not non_zero_curves: # no damage. all zero curves
return curves[0][0], numpy.array([poes for _losses, poes in curves])
else: # standard case
max_losses = [losses[-1] for losses, _poes in non_zero_curves]
reference_curve = non_zero_curves[numpy.argmax(max_losses)]
loss_ratios = reference_curve[0]
curves_poes = [interpolate.interp1d(
losses, poes, bounds_error=False, fill_value=0)(loss_ratios)
for losses, poes in curves]
# fix degenerated case with flat curve
for cp in curves_poes:
if numpy.isnan(cp[0]):
cp[0] = 0
return loss_ratios, numpy.array(curves_poes) | python | def normalize_curves_eb(curves):
"""
A more sophisticated version of normalize_curves, used in the event
based calculator.
:param curves: a list of pairs (losses, poes)
:returns: first losses, all_poes
"""
# we assume non-decreasing losses, so losses[-1] is the maximum loss
non_zero_curves = [(losses, poes)
for losses, poes in curves if losses[-1] > 0]
if not non_zero_curves: # no damage. all zero curves
return curves[0][0], numpy.array([poes for _losses, poes in curves])
else: # standard case
max_losses = [losses[-1] for losses, _poes in non_zero_curves]
reference_curve = non_zero_curves[numpy.argmax(max_losses)]
loss_ratios = reference_curve[0]
curves_poes = [interpolate.interp1d(
losses, poes, bounds_error=False, fill_value=0)(loss_ratios)
for losses, poes in curves]
# fix degenerated case with flat curve
for cp in curves_poes:
if numpy.isnan(cp[0]):
cp[0] = 0
return loss_ratios, numpy.array(curves_poes) | [
"def",
"normalize_curves_eb",
"(",
"curves",
")",
":",
"# we assume non-decreasing losses, so losses[-1] is the maximum loss",
"non_zero_curves",
"=",
"[",
"(",
"losses",
",",
"poes",
")",
"for",
"losses",
",",
"poes",
"in",
"curves",
"if",
"losses",
"[",
"-",
"1",
"]",
">",
"0",
"]",
"if",
"not",
"non_zero_curves",
":",
"# no damage. all zero curves",
"return",
"curves",
"[",
"0",
"]",
"[",
"0",
"]",
",",
"numpy",
".",
"array",
"(",
"[",
"poes",
"for",
"_losses",
",",
"poes",
"in",
"curves",
"]",
")",
"else",
":",
"# standard case",
"max_losses",
"=",
"[",
"losses",
"[",
"-",
"1",
"]",
"for",
"losses",
",",
"_poes",
"in",
"non_zero_curves",
"]",
"reference_curve",
"=",
"non_zero_curves",
"[",
"numpy",
".",
"argmax",
"(",
"max_losses",
")",
"]",
"loss_ratios",
"=",
"reference_curve",
"[",
"0",
"]",
"curves_poes",
"=",
"[",
"interpolate",
".",
"interp1d",
"(",
"losses",
",",
"poes",
",",
"bounds_error",
"=",
"False",
",",
"fill_value",
"=",
"0",
")",
"(",
"loss_ratios",
")",
"for",
"losses",
",",
"poes",
"in",
"curves",
"]",
"# fix degenerated case with flat curve",
"for",
"cp",
"in",
"curves_poes",
":",
"if",
"numpy",
".",
"isnan",
"(",
"cp",
"[",
"0",
"]",
")",
":",
"cp",
"[",
"0",
"]",
"=",
"0",
"return",
"loss_ratios",
",",
"numpy",
".",
"array",
"(",
"curves_poes",
")"
] | A more sophisticated version of normalize_curves, used in the event
based calculator.
:param curves: a list of pairs (losses, poes)
:returns: first losses, all_poes | [
"A",
"more",
"sophisticated",
"version",
"of",
"normalize_curves",
"used",
"in",
"the",
"event",
"based",
"calculator",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/risklib/scientific.py#L1214-L1238 | train | 233,013 |
gem/oq-engine | openquake/risklib/scientific.py | VulnerabilityFunction.sample | def sample(self, means, covs, idxs, epsilons=None):
"""
Sample the epsilons and apply the corrections to the means.
This method is called only if there are nonzero covs.
:param means:
array of E' loss ratios
:param covs:
array of E' floats
:param idxs:
array of E booleans with E >= E'
:param epsilons:
array of E floats (or None)
:returns:
array of E' loss ratios
"""
if epsilons is None:
return means
self.set_distribution(epsilons)
res = self.distribution.sample(means, covs, means * covs, idxs)
return res | python | def sample(self, means, covs, idxs, epsilons=None):
"""
Sample the epsilons and apply the corrections to the means.
This method is called only if there are nonzero covs.
:param means:
array of E' loss ratios
:param covs:
array of E' floats
:param idxs:
array of E booleans with E >= E'
:param epsilons:
array of E floats (or None)
:returns:
array of E' loss ratios
"""
if epsilons is None:
return means
self.set_distribution(epsilons)
res = self.distribution.sample(means, covs, means * covs, idxs)
return res | [
"def",
"sample",
"(",
"self",
",",
"means",
",",
"covs",
",",
"idxs",
",",
"epsilons",
"=",
"None",
")",
":",
"if",
"epsilons",
"is",
"None",
":",
"return",
"means",
"self",
".",
"set_distribution",
"(",
"epsilons",
")",
"res",
"=",
"self",
".",
"distribution",
".",
"sample",
"(",
"means",
",",
"covs",
",",
"means",
"*",
"covs",
",",
"idxs",
")",
"return",
"res"
] | Sample the epsilons and apply the corrections to the means.
This method is called only if there are nonzero covs.
:param means:
array of E' loss ratios
:param covs:
array of E' floats
:param idxs:
array of E booleans with E >= E'
:param epsilons:
array of E floats (or None)
:returns:
array of E' loss ratios | [
"Sample",
"the",
"epsilons",
"and",
"apply",
"the",
"corrections",
"to",
"the",
"means",
".",
"This",
"method",
"is",
"called",
"only",
"if",
"there",
"are",
"nonzero",
"covs",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/risklib/scientific.py#L161-L181 | train | 233,014 |
gem/oq-engine | openquake/risklib/scientific.py | VulnerabilityFunction.mean_loss_ratios_with_steps | def mean_loss_ratios_with_steps(self, steps):
"""
Split the mean loss ratios, producing a new set of loss ratios. The new
set of loss ratios always includes 0.0 and 1.0
:param int steps:
the number of steps we make to go from one loss
ratio to the next. For example, if we have [0.5, 0.7]::
steps = 1 produces [0.0, 0.5, 0.7, 1]
steps = 2 produces [0.0, 0.25, 0.5, 0.6, 0.7, 0.85, 1]
steps = 3 produces [0.0, 0.17, 0.33, 0.5, 0.57, 0.63,
0.7, 0.8, 0.9, 1]
"""
loss_ratios = self.mean_loss_ratios
if min(loss_ratios) > 0.0:
# prepend with a zero
loss_ratios = numpy.concatenate([[0.0], loss_ratios])
if max(loss_ratios) < 1.0:
# append a 1.0
loss_ratios = numpy.concatenate([loss_ratios, [1.0]])
return fine_graining(loss_ratios, steps) | python | def mean_loss_ratios_with_steps(self, steps):
"""
Split the mean loss ratios, producing a new set of loss ratios. The new
set of loss ratios always includes 0.0 and 1.0
:param int steps:
the number of steps we make to go from one loss
ratio to the next. For example, if we have [0.5, 0.7]::
steps = 1 produces [0.0, 0.5, 0.7, 1]
steps = 2 produces [0.0, 0.25, 0.5, 0.6, 0.7, 0.85, 1]
steps = 3 produces [0.0, 0.17, 0.33, 0.5, 0.57, 0.63,
0.7, 0.8, 0.9, 1]
"""
loss_ratios = self.mean_loss_ratios
if min(loss_ratios) > 0.0:
# prepend with a zero
loss_ratios = numpy.concatenate([[0.0], loss_ratios])
if max(loss_ratios) < 1.0:
# append a 1.0
loss_ratios = numpy.concatenate([loss_ratios, [1.0]])
return fine_graining(loss_ratios, steps) | [
"def",
"mean_loss_ratios_with_steps",
"(",
"self",
",",
"steps",
")",
":",
"loss_ratios",
"=",
"self",
".",
"mean_loss_ratios",
"if",
"min",
"(",
"loss_ratios",
")",
">",
"0.0",
":",
"# prepend with a zero",
"loss_ratios",
"=",
"numpy",
".",
"concatenate",
"(",
"[",
"[",
"0.0",
"]",
",",
"loss_ratios",
"]",
")",
"if",
"max",
"(",
"loss_ratios",
")",
"<",
"1.0",
":",
"# append a 1.0",
"loss_ratios",
"=",
"numpy",
".",
"concatenate",
"(",
"[",
"loss_ratios",
",",
"[",
"1.0",
"]",
"]",
")",
"return",
"fine_graining",
"(",
"loss_ratios",
",",
"steps",
")"
] | Split the mean loss ratios, producing a new set of loss ratios. The new
set of loss ratios always includes 0.0 and 1.0
:param int steps:
the number of steps we make to go from one loss
ratio to the next. For example, if we have [0.5, 0.7]::
steps = 1 produces [0.0, 0.5, 0.7, 1]
steps = 2 produces [0.0, 0.25, 0.5, 0.6, 0.7, 0.85, 1]
steps = 3 produces [0.0, 0.17, 0.33, 0.5, 0.57, 0.63,
0.7, 0.8, 0.9, 1] | [
"Split",
"the",
"mean",
"loss",
"ratios",
"producing",
"a",
"new",
"set",
"of",
"loss",
"ratios",
".",
"The",
"new",
"set",
"of",
"loss",
"ratios",
"always",
"includes",
"0",
".",
"0",
"and",
"1",
".",
"0"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/risklib/scientific.py#L216-L240 | train | 233,015 |
gem/oq-engine | openquake/risklib/scientific.py | VulnerabilityFunctionWithPMF.sample | def sample(self, probs, _covs, idxs, epsilons):
"""
Sample the .loss_ratios with the given probabilities.
:param probs:
array of E' floats
:param _covs:
ignored, it is there only for API consistency
:param idxs:
array of E booleans with E >= E'
:param epsilons:
array of E floats
:returns:
array of E' probabilities
"""
self.set_distribution(epsilons)
return self.distribution.sample(self.loss_ratios, probs) | python | def sample(self, probs, _covs, idxs, epsilons):
"""
Sample the .loss_ratios with the given probabilities.
:param probs:
array of E' floats
:param _covs:
ignored, it is there only for API consistency
:param idxs:
array of E booleans with E >= E'
:param epsilons:
array of E floats
:returns:
array of E' probabilities
"""
self.set_distribution(epsilons)
return self.distribution.sample(self.loss_ratios, probs) | [
"def",
"sample",
"(",
"self",
",",
"probs",
",",
"_covs",
",",
"idxs",
",",
"epsilons",
")",
":",
"self",
".",
"set_distribution",
"(",
"epsilons",
")",
"return",
"self",
".",
"distribution",
".",
"sample",
"(",
"self",
".",
"loss_ratios",
",",
"probs",
")"
] | Sample the .loss_ratios with the given probabilities.
:param probs:
array of E' floats
:param _covs:
ignored, it is there only for API consistency
:param idxs:
array of E booleans with E >= E'
:param epsilons:
array of E floats
:returns:
array of E' probabilities | [
"Sample",
"the",
".",
"loss_ratios",
"with",
"the",
"given",
"probabilities",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/risklib/scientific.py#L406-L422 | train | 233,016 |
gem/oq-engine | openquake/risklib/scientific.py | FragilityModel.build | def build(self, continuous_fragility_discretization, steps_per_interval):
"""
Return a new FragilityModel instance, in which the values have been
replaced with FragilityFunctionList instances.
:param continuous_fragility_discretization:
configuration parameter
:param steps_per_interval:
configuration parameter
"""
newfm = copy.copy(self)
for key, ffl in self.items():
newfm[key] = ffl.build(self.limitStates,
continuous_fragility_discretization,
steps_per_interval)
return newfm | python | def build(self, continuous_fragility_discretization, steps_per_interval):
"""
Return a new FragilityModel instance, in which the values have been
replaced with FragilityFunctionList instances.
:param continuous_fragility_discretization:
configuration parameter
:param steps_per_interval:
configuration parameter
"""
newfm = copy.copy(self)
for key, ffl in self.items():
newfm[key] = ffl.build(self.limitStates,
continuous_fragility_discretization,
steps_per_interval)
return newfm | [
"def",
"build",
"(",
"self",
",",
"continuous_fragility_discretization",
",",
"steps_per_interval",
")",
":",
"newfm",
"=",
"copy",
".",
"copy",
"(",
"self",
")",
"for",
"key",
",",
"ffl",
"in",
"self",
".",
"items",
"(",
")",
":",
"newfm",
"[",
"key",
"]",
"=",
"ffl",
".",
"build",
"(",
"self",
".",
"limitStates",
",",
"continuous_fragility_discretization",
",",
"steps_per_interval",
")",
"return",
"newfm"
] | Return a new FragilityModel instance, in which the values have been
replaced with FragilityFunctionList instances.
:param continuous_fragility_discretization:
configuration parameter
:param steps_per_interval:
configuration parameter | [
"Return",
"a",
"new",
"FragilityModel",
"instance",
"in",
"which",
"the",
"values",
"have",
"been",
"replaced",
"with",
"FragilityFunctionList",
"instances",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/risklib/scientific.py#L719-L734 | train | 233,017 |
gem/oq-engine | openquake/calculators/event_based.py | compute_gmfs | def compute_gmfs(rupgetter, srcfilter, param, monitor):
"""
Compute GMFs and optionally hazard curves
"""
getter = GmfGetter(rupgetter, srcfilter, param['oqparam'])
with monitor('getting ruptures'):
getter.init()
return getter.compute_gmfs_curves(monitor) | python | def compute_gmfs(rupgetter, srcfilter, param, monitor):
"""
Compute GMFs and optionally hazard curves
"""
getter = GmfGetter(rupgetter, srcfilter, param['oqparam'])
with monitor('getting ruptures'):
getter.init()
return getter.compute_gmfs_curves(monitor) | [
"def",
"compute_gmfs",
"(",
"rupgetter",
",",
"srcfilter",
",",
"param",
",",
"monitor",
")",
":",
"getter",
"=",
"GmfGetter",
"(",
"rupgetter",
",",
"srcfilter",
",",
"param",
"[",
"'oqparam'",
"]",
")",
"with",
"monitor",
"(",
"'getting ruptures'",
")",
":",
"getter",
".",
"init",
"(",
")",
"return",
"getter",
".",
"compute_gmfs_curves",
"(",
"monitor",
")"
] | Compute GMFs and optionally hazard curves | [
"Compute",
"GMFs",
"and",
"optionally",
"hazard",
"curves"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/event_based.py#L82-L89 | train | 233,018 |
gem/oq-engine | openquake/hmtk/sources/complex_fault_source.py | mtkComplexFaultSource._get_minmax_edges | def _get_minmax_edges(self, edge):
'''
Updates the upper and lower depths based on the input edges
'''
if isinstance(edge, Line):
# For instance of line class need to loop over values
depth_vals = np.array([node.depth for node in edge.points])
else:
depth_vals = edge[:, 2]
temp_upper_depth = np.min(depth_vals)
if not self.upper_depth:
self.upper_depth = temp_upper_depth
else:
if temp_upper_depth < self.upper_depth:
self.upper_depth = temp_upper_depth
temp_lower_depth = np.max(depth_vals)
if not self.lower_depth:
self.lower_depth = temp_lower_depth
else:
if temp_lower_depth > self.lower_depth:
self.lower_depth = temp_lower_depth | python | def _get_minmax_edges(self, edge):
'''
Updates the upper and lower depths based on the input edges
'''
if isinstance(edge, Line):
# For instance of line class need to loop over values
depth_vals = np.array([node.depth for node in edge.points])
else:
depth_vals = edge[:, 2]
temp_upper_depth = np.min(depth_vals)
if not self.upper_depth:
self.upper_depth = temp_upper_depth
else:
if temp_upper_depth < self.upper_depth:
self.upper_depth = temp_upper_depth
temp_lower_depth = np.max(depth_vals)
if not self.lower_depth:
self.lower_depth = temp_lower_depth
else:
if temp_lower_depth > self.lower_depth:
self.lower_depth = temp_lower_depth | [
"def",
"_get_minmax_edges",
"(",
"self",
",",
"edge",
")",
":",
"if",
"isinstance",
"(",
"edge",
",",
"Line",
")",
":",
"# For instance of line class need to loop over values",
"depth_vals",
"=",
"np",
".",
"array",
"(",
"[",
"node",
".",
"depth",
"for",
"node",
"in",
"edge",
".",
"points",
"]",
")",
"else",
":",
"depth_vals",
"=",
"edge",
"[",
":",
",",
"2",
"]",
"temp_upper_depth",
"=",
"np",
".",
"min",
"(",
"depth_vals",
")",
"if",
"not",
"self",
".",
"upper_depth",
":",
"self",
".",
"upper_depth",
"=",
"temp_upper_depth",
"else",
":",
"if",
"temp_upper_depth",
"<",
"self",
".",
"upper_depth",
":",
"self",
".",
"upper_depth",
"=",
"temp_upper_depth",
"temp_lower_depth",
"=",
"np",
".",
"max",
"(",
"depth_vals",
")",
"if",
"not",
"self",
".",
"lower_depth",
":",
"self",
".",
"lower_depth",
"=",
"temp_lower_depth",
"else",
":",
"if",
"temp_lower_depth",
">",
"self",
".",
"lower_depth",
":",
"self",
".",
"lower_depth",
"=",
"temp_lower_depth"
] | Updates the upper and lower depths based on the input edges | [
"Updates",
"the",
"upper",
"and",
"lower",
"depths",
"based",
"on",
"the",
"input",
"edges"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/sources/complex_fault_source.py#L153-L175 | train | 233,019 |
gem/oq-engine | openquake/hazardlib/gsim/kotha_2016.py | KothaEtAl2016._get_magnitude_term | def _get_magnitude_term(self, C, mag):
"""
Returns the magnitude scaling term - equation 3
"""
if mag >= self.CONSTS["Mh"]:
return C["e1"] + C["b3"] * (mag - self.CONSTS["Mh"])
else:
return C["e1"] + (C["b1"] * (mag - self.CONSTS["Mh"])) +\
(C["b2"] * (mag - self.CONSTS["Mh"]) ** 2.) | python | def _get_magnitude_term(self, C, mag):
"""
Returns the magnitude scaling term - equation 3
"""
if mag >= self.CONSTS["Mh"]:
return C["e1"] + C["b3"] * (mag - self.CONSTS["Mh"])
else:
return C["e1"] + (C["b1"] * (mag - self.CONSTS["Mh"])) +\
(C["b2"] * (mag - self.CONSTS["Mh"]) ** 2.) | [
"def",
"_get_magnitude_term",
"(",
"self",
",",
"C",
",",
"mag",
")",
":",
"if",
"mag",
">=",
"self",
".",
"CONSTS",
"[",
"\"Mh\"",
"]",
":",
"return",
"C",
"[",
"\"e1\"",
"]",
"+",
"C",
"[",
"\"b3\"",
"]",
"*",
"(",
"mag",
"-",
"self",
".",
"CONSTS",
"[",
"\"Mh\"",
"]",
")",
"else",
":",
"return",
"C",
"[",
"\"e1\"",
"]",
"+",
"(",
"C",
"[",
"\"b1\"",
"]",
"*",
"(",
"mag",
"-",
"self",
".",
"CONSTS",
"[",
"\"Mh\"",
"]",
")",
")",
"+",
"(",
"C",
"[",
"\"b2\"",
"]",
"*",
"(",
"mag",
"-",
"self",
".",
"CONSTS",
"[",
"\"Mh\"",
"]",
")",
"**",
"2.",
")"
] | Returns the magnitude scaling term - equation 3 | [
"Returns",
"the",
"magnitude",
"scaling",
"term",
"-",
"equation",
"3"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/kotha_2016.py#L101-L109 | train | 233,020 |
gem/oq-engine | openquake/hazardlib/gsim/kotha_2016.py | KothaEtAl2016._get_distance_term | def _get_distance_term(self, C, rjb, mag):
"""
Returns the general distance scaling term - equation 2
"""
c_3 = self._get_anelastic_coeff(C)
rval = np.sqrt(rjb ** 2. + C["h"] ** 2.)
return (C["c1"] + C["c2"] * (mag - self.CONSTS["Mref"])) *\
np.log(rval / self.CONSTS["Rref"]) +\
c_3 * (rval - self.CONSTS["Rref"]) | python | def _get_distance_term(self, C, rjb, mag):
"""
Returns the general distance scaling term - equation 2
"""
c_3 = self._get_anelastic_coeff(C)
rval = np.sqrt(rjb ** 2. + C["h"] ** 2.)
return (C["c1"] + C["c2"] * (mag - self.CONSTS["Mref"])) *\
np.log(rval / self.CONSTS["Rref"]) +\
c_3 * (rval - self.CONSTS["Rref"]) | [
"def",
"_get_distance_term",
"(",
"self",
",",
"C",
",",
"rjb",
",",
"mag",
")",
":",
"c_3",
"=",
"self",
".",
"_get_anelastic_coeff",
"(",
"C",
")",
"rval",
"=",
"np",
".",
"sqrt",
"(",
"rjb",
"**",
"2.",
"+",
"C",
"[",
"\"h\"",
"]",
"**",
"2.",
")",
"return",
"(",
"C",
"[",
"\"c1\"",
"]",
"+",
"C",
"[",
"\"c2\"",
"]",
"*",
"(",
"mag",
"-",
"self",
".",
"CONSTS",
"[",
"\"Mref\"",
"]",
")",
")",
"*",
"np",
".",
"log",
"(",
"rval",
"/",
"self",
".",
"CONSTS",
"[",
"\"Rref\"",
"]",
")",
"+",
"c_3",
"*",
"(",
"rval",
"-",
"self",
".",
"CONSTS",
"[",
"\"Rref\"",
"]",
")"
] | Returns the general distance scaling term - equation 2 | [
"Returns",
"the",
"general",
"distance",
"scaling",
"term",
"-",
"equation",
"2"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/kotha_2016.py#L111-L119 | train | 233,021 |
gem/oq-engine | openquake/hazardlib/gsim/kotha_2016.py | KothaEtAl2016._get_site_term | def _get_site_term(self, C, vs30):
"""
Returns only a linear site amplification term
"""
dg1, dg2 = self._get_regional_site_term(C)
return (C["g1"] + dg1) + (C["g2"] + dg2) * np.log(vs30) | python | def _get_site_term(self, C, vs30):
"""
Returns only a linear site amplification term
"""
dg1, dg2 = self._get_regional_site_term(C)
return (C["g1"] + dg1) + (C["g2"] + dg2) * np.log(vs30) | [
"def",
"_get_site_term",
"(",
"self",
",",
"C",
",",
"vs30",
")",
":",
"dg1",
",",
"dg2",
"=",
"self",
".",
"_get_regional_site_term",
"(",
"C",
")",
"return",
"(",
"C",
"[",
"\"g1\"",
"]",
"+",
"dg1",
")",
"+",
"(",
"C",
"[",
"\"g2\"",
"]",
"+",
"dg2",
")",
"*",
"np",
".",
"log",
"(",
"vs30",
")"
] | Returns only a linear site amplification term | [
"Returns",
"only",
"a",
"linear",
"site",
"amplification",
"term"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/kotha_2016.py#L128-L133 | train | 233,022 |
gem/oq-engine | openquake/hazardlib/gsim/tusa_langer_2016.py | TusaLanger2016RepiBA08SE._get_stddevs | def _get_stddevs(self, C, stddev_types, num_sites):
"""
Return standard deviations as defined in tables below
"""
assert all(stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES
for stddev_type in stddev_types)
stddevs = [np.zeros(num_sites) + C['SigmaTot'] for _ in stddev_types]
return stddevs | python | def _get_stddevs(self, C, stddev_types, num_sites):
"""
Return standard deviations as defined in tables below
"""
assert all(stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES
for stddev_type in stddev_types)
stddevs = [np.zeros(num_sites) + C['SigmaTot'] for _ in stddev_types]
return stddevs | [
"def",
"_get_stddevs",
"(",
"self",
",",
"C",
",",
"stddev_types",
",",
"num_sites",
")",
":",
"assert",
"all",
"(",
"stddev_type",
"in",
"self",
".",
"DEFINED_FOR_STANDARD_DEVIATION_TYPES",
"for",
"stddev_type",
"in",
"stddev_types",
")",
"stddevs",
"=",
"[",
"np",
".",
"zeros",
"(",
"num_sites",
")",
"+",
"C",
"[",
"'SigmaTot'",
"]",
"for",
"_",
"in",
"stddev_types",
"]",
"return",
"stddevs"
] | Return standard deviations as defined in tables below | [
"Return",
"standard",
"deviations",
"as",
"defined",
"in",
"tables",
"below"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/tusa_langer_2016.py#L112-L119 | train | 233,023 |
gem/oq-engine | openquake/hazardlib/gsim/dowrickrhoades_2005.py | DowrickRhoades2005Asc._compute_mean | def _compute_mean(self, C, mag, rrup, hypo_depth, delta_R, delta_S,
delta_V, delta_I, vs30):
"""
Compute MMI Intensity Value as per Equation in Table 5 and
Table 7 pag 198.
"""
# mean is calculated for all the 4 classes using the same equation.
# For DowrickRhoades2005SSlab, the coefficients which don't appear in
# Model 3 equationare assigned to zero
mean = (C['A1'] + (C['A2'] + C['A2R'] * delta_R + C['A2V'] * delta_V) *
mag + (C['A3'] + C['A3S'] * delta_S + C['A3V'] * delta_V) *
np.log10(np.power((rrup**3 + C['d']**3), 1.0 / 3.0)) +
C['A4'] * hypo_depth + C['A5'] * delta_I)
# Get S site class term
S = self._get_site_class(vs30, mean)
# Add S amplification term to mean value
mean = mean + S
return mean | python | def _compute_mean(self, C, mag, rrup, hypo_depth, delta_R, delta_S,
delta_V, delta_I, vs30):
"""
Compute MMI Intensity Value as per Equation in Table 5 and
Table 7 pag 198.
"""
# mean is calculated for all the 4 classes using the same equation.
# For DowrickRhoades2005SSlab, the coefficients which don't appear in
# Model 3 equationare assigned to zero
mean = (C['A1'] + (C['A2'] + C['A2R'] * delta_R + C['A2V'] * delta_V) *
mag + (C['A3'] + C['A3S'] * delta_S + C['A3V'] * delta_V) *
np.log10(np.power((rrup**3 + C['d']**3), 1.0 / 3.0)) +
C['A4'] * hypo_depth + C['A5'] * delta_I)
# Get S site class term
S = self._get_site_class(vs30, mean)
# Add S amplification term to mean value
mean = mean + S
return mean | [
"def",
"_compute_mean",
"(",
"self",
",",
"C",
",",
"mag",
",",
"rrup",
",",
"hypo_depth",
",",
"delta_R",
",",
"delta_S",
",",
"delta_V",
",",
"delta_I",
",",
"vs30",
")",
":",
"# mean is calculated for all the 4 classes using the same equation.",
"# For DowrickRhoades2005SSlab, the coefficients which don't appear in",
"# Model 3 equationare assigned to zero",
"mean",
"=",
"(",
"C",
"[",
"'A1'",
"]",
"+",
"(",
"C",
"[",
"'A2'",
"]",
"+",
"C",
"[",
"'A2R'",
"]",
"*",
"delta_R",
"+",
"C",
"[",
"'A2V'",
"]",
"*",
"delta_V",
")",
"*",
"mag",
"+",
"(",
"C",
"[",
"'A3'",
"]",
"+",
"C",
"[",
"'A3S'",
"]",
"*",
"delta_S",
"+",
"C",
"[",
"'A3V'",
"]",
"*",
"delta_V",
")",
"*",
"np",
".",
"log10",
"(",
"np",
".",
"power",
"(",
"(",
"rrup",
"**",
"3",
"+",
"C",
"[",
"'d'",
"]",
"**",
"3",
")",
",",
"1.0",
"/",
"3.0",
")",
")",
"+",
"C",
"[",
"'A4'",
"]",
"*",
"hypo_depth",
"+",
"C",
"[",
"'A5'",
"]",
"*",
"delta_I",
")",
"# Get S site class term",
"S",
"=",
"self",
".",
"_get_site_class",
"(",
"vs30",
",",
"mean",
")",
"# Add S amplification term to mean value",
"mean",
"=",
"mean",
"+",
"S",
"return",
"mean"
] | Compute MMI Intensity Value as per Equation in Table 5 and
Table 7 pag 198. | [
"Compute",
"MMI",
"Intensity",
"Value",
"as",
"per",
"Equation",
"in",
"Table",
"5",
"and",
"Table",
"7",
"pag",
"198",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/dowrickrhoades_2005.py#L98-L120 | train | 233,024 |
gem/oq-engine | openquake/hazardlib/gsim/dowrickrhoades_2005.py | DowrickRhoades2005Asc._get_stddevs | def _get_stddevs(self, C, stddev_types, num_sites):
"""
Return total standard deviation as described in paragraph 5.2 pag 200.
"""
# interevent stddev
sigma_inter = C['tau'] + np.zeros(num_sites)
# intraevent std
sigma_intra = C['sigma'] + np.zeros(num_sites)
std = []
for stddev_type in stddev_types:
if stddev_type == const.StdDev.TOTAL:
# equation in section 5.2 page 200
std += [np.sqrt(sigma_intra**2 + sigma_inter**2)]
elif stddev_type == const.StdDev.INTRA_EVENT:
std.append(sigma_intra)
elif stddev_type == const.StdDev.INTER_EVENT:
std.append(sigma_inter)
return std | python | def _get_stddevs(self, C, stddev_types, num_sites):
"""
Return total standard deviation as described in paragraph 5.2 pag 200.
"""
# interevent stddev
sigma_inter = C['tau'] + np.zeros(num_sites)
# intraevent std
sigma_intra = C['sigma'] + np.zeros(num_sites)
std = []
for stddev_type in stddev_types:
if stddev_type == const.StdDev.TOTAL:
# equation in section 5.2 page 200
std += [np.sqrt(sigma_intra**2 + sigma_inter**2)]
elif stddev_type == const.StdDev.INTRA_EVENT:
std.append(sigma_intra)
elif stddev_type == const.StdDev.INTER_EVENT:
std.append(sigma_inter)
return std | [
"def",
"_get_stddevs",
"(",
"self",
",",
"C",
",",
"stddev_types",
",",
"num_sites",
")",
":",
"# interevent stddev",
"sigma_inter",
"=",
"C",
"[",
"'tau'",
"]",
"+",
"np",
".",
"zeros",
"(",
"num_sites",
")",
"# intraevent std",
"sigma_intra",
"=",
"C",
"[",
"'sigma'",
"]",
"+",
"np",
".",
"zeros",
"(",
"num_sites",
")",
"std",
"=",
"[",
"]",
"for",
"stddev_type",
"in",
"stddev_types",
":",
"if",
"stddev_type",
"==",
"const",
".",
"StdDev",
".",
"TOTAL",
":",
"# equation in section 5.2 page 200",
"std",
"+=",
"[",
"np",
".",
"sqrt",
"(",
"sigma_intra",
"**",
"2",
"+",
"sigma_inter",
"**",
"2",
")",
"]",
"elif",
"stddev_type",
"==",
"const",
".",
"StdDev",
".",
"INTRA_EVENT",
":",
"std",
".",
"append",
"(",
"sigma_intra",
")",
"elif",
"stddev_type",
"==",
"const",
".",
"StdDev",
".",
"INTER_EVENT",
":",
"std",
".",
"append",
"(",
"sigma_inter",
")",
"return",
"std"
] | Return total standard deviation as described in paragraph 5.2 pag 200. | [
"Return",
"total",
"standard",
"deviation",
"as",
"described",
"in",
"paragraph",
"5",
".",
"2",
"pag",
"200",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/dowrickrhoades_2005.py#L122-L143 | train | 233,025 |
gem/oq-engine | openquake/commands/plot_assets.py | plot_assets | def plot_assets(calc_id=-1, site_model=False):
"""
Plot the sites and the assets
"""
# NB: matplotlib is imported inside since it is a costly import
import matplotlib.pyplot as p
from openquake.hmtk.plotting.patch import PolygonPatch
dstore = util.read(calc_id)
try:
region = dstore['oqparam'].region
except KeyError:
region = None
sitecol = dstore['sitecol']
try:
assetcol = dstore['assetcol'].value
except AttributeError:
assetcol = dstore['assetcol'].array
fig = p.figure()
ax = fig.add_subplot(111)
if region:
pp = PolygonPatch(shapely.wkt.loads(region), alpha=0.1)
ax.add_patch(pp)
ax.grid(True)
if site_model and 'site_model' in dstore:
sm = dstore['site_model']
sm_lons, sm_lats = sm['lon'], sm['lat']
if len(sm_lons) > 1 and cross_idl(*sm_lons):
sm_lons %= 360
p.scatter(sm_lons, sm_lats, marker='.', color='orange')
p.scatter(sitecol.complete.lons, sitecol.complete.lats, marker='.',
color='gray')
p.scatter(assetcol['lon'], assetcol['lat'], marker='.', color='green')
p.scatter(sitecol.lons, sitecol.lats, marker='+', color='black')
if 'discarded' in dstore:
disc = numpy.unique(dstore['discarded'].value[['lon', 'lat']])
p.scatter(disc['lon'], disc['lat'], marker='x', color='red')
p.show() | python | def plot_assets(calc_id=-1, site_model=False):
"""
Plot the sites and the assets
"""
# NB: matplotlib is imported inside since it is a costly import
import matplotlib.pyplot as p
from openquake.hmtk.plotting.patch import PolygonPatch
dstore = util.read(calc_id)
try:
region = dstore['oqparam'].region
except KeyError:
region = None
sitecol = dstore['sitecol']
try:
assetcol = dstore['assetcol'].value
except AttributeError:
assetcol = dstore['assetcol'].array
fig = p.figure()
ax = fig.add_subplot(111)
if region:
pp = PolygonPatch(shapely.wkt.loads(region), alpha=0.1)
ax.add_patch(pp)
ax.grid(True)
if site_model and 'site_model' in dstore:
sm = dstore['site_model']
sm_lons, sm_lats = sm['lon'], sm['lat']
if len(sm_lons) > 1 and cross_idl(*sm_lons):
sm_lons %= 360
p.scatter(sm_lons, sm_lats, marker='.', color='orange')
p.scatter(sitecol.complete.lons, sitecol.complete.lats, marker='.',
color='gray')
p.scatter(assetcol['lon'], assetcol['lat'], marker='.', color='green')
p.scatter(sitecol.lons, sitecol.lats, marker='+', color='black')
if 'discarded' in dstore:
disc = numpy.unique(dstore['discarded'].value[['lon', 'lat']])
p.scatter(disc['lon'], disc['lat'], marker='x', color='red')
p.show() | [
"def",
"plot_assets",
"(",
"calc_id",
"=",
"-",
"1",
",",
"site_model",
"=",
"False",
")",
":",
"# NB: matplotlib is imported inside since it is a costly import",
"import",
"matplotlib",
".",
"pyplot",
"as",
"p",
"from",
"openquake",
".",
"hmtk",
".",
"plotting",
".",
"patch",
"import",
"PolygonPatch",
"dstore",
"=",
"util",
".",
"read",
"(",
"calc_id",
")",
"try",
":",
"region",
"=",
"dstore",
"[",
"'oqparam'",
"]",
".",
"region",
"except",
"KeyError",
":",
"region",
"=",
"None",
"sitecol",
"=",
"dstore",
"[",
"'sitecol'",
"]",
"try",
":",
"assetcol",
"=",
"dstore",
"[",
"'assetcol'",
"]",
".",
"value",
"except",
"AttributeError",
":",
"assetcol",
"=",
"dstore",
"[",
"'assetcol'",
"]",
".",
"array",
"fig",
"=",
"p",
".",
"figure",
"(",
")",
"ax",
"=",
"fig",
".",
"add_subplot",
"(",
"111",
")",
"if",
"region",
":",
"pp",
"=",
"PolygonPatch",
"(",
"shapely",
".",
"wkt",
".",
"loads",
"(",
"region",
")",
",",
"alpha",
"=",
"0.1",
")",
"ax",
".",
"add_patch",
"(",
"pp",
")",
"ax",
".",
"grid",
"(",
"True",
")",
"if",
"site_model",
"and",
"'site_model'",
"in",
"dstore",
":",
"sm",
"=",
"dstore",
"[",
"'site_model'",
"]",
"sm_lons",
",",
"sm_lats",
"=",
"sm",
"[",
"'lon'",
"]",
",",
"sm",
"[",
"'lat'",
"]",
"if",
"len",
"(",
"sm_lons",
")",
">",
"1",
"and",
"cross_idl",
"(",
"*",
"sm_lons",
")",
":",
"sm_lons",
"%=",
"360",
"p",
".",
"scatter",
"(",
"sm_lons",
",",
"sm_lats",
",",
"marker",
"=",
"'.'",
",",
"color",
"=",
"'orange'",
")",
"p",
".",
"scatter",
"(",
"sitecol",
".",
"complete",
".",
"lons",
",",
"sitecol",
".",
"complete",
".",
"lats",
",",
"marker",
"=",
"'.'",
",",
"color",
"=",
"'gray'",
")",
"p",
".",
"scatter",
"(",
"assetcol",
"[",
"'lon'",
"]",
",",
"assetcol",
"[",
"'lat'",
"]",
",",
"marker",
"=",
"'.'",
",",
"color",
"=",
"'green'",
")",
"p",
".",
"scatter",
"(",
"sitecol",
".",
"lons",
",",
"sitecol",
".",
"lats",
",",
"marker",
"=",
"'+'",
",",
"color",
"=",
"'black'",
")",
"if",
"'discarded'",
"in",
"dstore",
":",
"disc",
"=",
"numpy",
".",
"unique",
"(",
"dstore",
"[",
"'discarded'",
"]",
".",
"value",
"[",
"[",
"'lon'",
",",
"'lat'",
"]",
"]",
")",
"p",
".",
"scatter",
"(",
"disc",
"[",
"'lon'",
"]",
",",
"disc",
"[",
"'lat'",
"]",
",",
"marker",
"=",
"'x'",
",",
"color",
"=",
"'red'",
")",
"p",
".",
"show",
"(",
")"
] | Plot the sites and the assets | [
"Plot",
"the",
"sites",
"and",
"the",
"assets"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commands/plot_assets.py#L26-L62 | train | 233,026 |
gem/oq-engine | openquake/hmtk/seismicity/smoothing/smoothed_seismicity.py | _get_adjustment | def _get_adjustment(mag, year, mmin, completeness_year, t_f, mag_inc=0.1):
'''
If the magnitude is greater than the minimum in the completeness table
and the year is greater than the corresponding completeness year then
return the Weichert factor
:param float mag:
Magnitude of an earthquake
:param float year:
Year of earthquake
:param np.ndarray completeness_table:
Completeness table
:param float mag_inc:
Magnitude increment
:param float t_f:
Weichert adjustment factor
:returns:
Weichert adjustment factor is event is in complete part of catalogue
(0.0 otherwise)
'''
if len(completeness_year) == 1:
if (mag >= mmin) and (year >= completeness_year[0]):
# No adjustment needed - event weight == 1
return 1.0
else:
# Event should not be counted
return False
kval = int(((mag - mmin) / mag_inc)) + 1
if (kval >= 1) and (year >= completeness_year[kval - 1]):
return t_f
else:
return False | python | def _get_adjustment(mag, year, mmin, completeness_year, t_f, mag_inc=0.1):
'''
If the magnitude is greater than the minimum in the completeness table
and the year is greater than the corresponding completeness year then
return the Weichert factor
:param float mag:
Magnitude of an earthquake
:param float year:
Year of earthquake
:param np.ndarray completeness_table:
Completeness table
:param float mag_inc:
Magnitude increment
:param float t_f:
Weichert adjustment factor
:returns:
Weichert adjustment factor is event is in complete part of catalogue
(0.0 otherwise)
'''
if len(completeness_year) == 1:
if (mag >= mmin) and (year >= completeness_year[0]):
# No adjustment needed - event weight == 1
return 1.0
else:
# Event should not be counted
return False
kval = int(((mag - mmin) / mag_inc)) + 1
if (kval >= 1) and (year >= completeness_year[kval - 1]):
return t_f
else:
return False | [
"def",
"_get_adjustment",
"(",
"mag",
",",
"year",
",",
"mmin",
",",
"completeness_year",
",",
"t_f",
",",
"mag_inc",
"=",
"0.1",
")",
":",
"if",
"len",
"(",
"completeness_year",
")",
"==",
"1",
":",
"if",
"(",
"mag",
">=",
"mmin",
")",
"and",
"(",
"year",
">=",
"completeness_year",
"[",
"0",
"]",
")",
":",
"# No adjustment needed - event weight == 1",
"return",
"1.0",
"else",
":",
"# Event should not be counted",
"return",
"False",
"kval",
"=",
"int",
"(",
"(",
"(",
"mag",
"-",
"mmin",
")",
"/",
"mag_inc",
")",
")",
"+",
"1",
"if",
"(",
"kval",
">=",
"1",
")",
"and",
"(",
"year",
">=",
"completeness_year",
"[",
"kval",
"-",
"1",
"]",
")",
":",
"return",
"t_f",
"else",
":",
"return",
"False"
] | If the magnitude is greater than the minimum in the completeness table
and the year is greater than the corresponding completeness year then
return the Weichert factor
:param float mag:
Magnitude of an earthquake
:param float year:
Year of earthquake
:param np.ndarray completeness_table:
Completeness table
:param float mag_inc:
Magnitude increment
:param float t_f:
Weichert adjustment factor
:returns:
Weichert adjustment factor is event is in complete part of catalogue
(0.0 otherwise) | [
"If",
"the",
"magnitude",
"is",
"greater",
"than",
"the",
"minimum",
"in",
"the",
"completeness",
"table",
"and",
"the",
"year",
"is",
"greater",
"than",
"the",
"corresponding",
"completeness",
"year",
"then",
"return",
"the",
"Weichert",
"factor"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/smoothing/smoothed_seismicity.py#L129-L167 | train | 233,027 |
gem/oq-engine | openquake/hmtk/seismicity/smoothing/smoothed_seismicity.py | get_catalogue_bounding_polygon | def get_catalogue_bounding_polygon(catalogue):
'''
Returns a polygon containing the bounding box of the catalogue
'''
upper_lon = np.max(catalogue.data['longitude'])
upper_lat = np.max(catalogue.data['latitude'])
lower_lon = np.min(catalogue.data['longitude'])
lower_lat = np.min(catalogue.data['latitude'])
return Polygon([Point(lower_lon, upper_lat), Point(upper_lon, upper_lat),
Point(upper_lon, lower_lat), Point(lower_lon, lower_lat)]) | python | def get_catalogue_bounding_polygon(catalogue):
'''
Returns a polygon containing the bounding box of the catalogue
'''
upper_lon = np.max(catalogue.data['longitude'])
upper_lat = np.max(catalogue.data['latitude'])
lower_lon = np.min(catalogue.data['longitude'])
lower_lat = np.min(catalogue.data['latitude'])
return Polygon([Point(lower_lon, upper_lat), Point(upper_lon, upper_lat),
Point(upper_lon, lower_lat), Point(lower_lon, lower_lat)]) | [
"def",
"get_catalogue_bounding_polygon",
"(",
"catalogue",
")",
":",
"upper_lon",
"=",
"np",
".",
"max",
"(",
"catalogue",
".",
"data",
"[",
"'longitude'",
"]",
")",
"upper_lat",
"=",
"np",
".",
"max",
"(",
"catalogue",
".",
"data",
"[",
"'latitude'",
"]",
")",
"lower_lon",
"=",
"np",
".",
"min",
"(",
"catalogue",
".",
"data",
"[",
"'longitude'",
"]",
")",
"lower_lat",
"=",
"np",
".",
"min",
"(",
"catalogue",
".",
"data",
"[",
"'latitude'",
"]",
")",
"return",
"Polygon",
"(",
"[",
"Point",
"(",
"lower_lon",
",",
"upper_lat",
")",
",",
"Point",
"(",
"upper_lon",
",",
"upper_lat",
")",
",",
"Point",
"(",
"upper_lon",
",",
"lower_lat",
")",
",",
"Point",
"(",
"lower_lon",
",",
"lower_lat",
")",
"]",
")"
] | Returns a polygon containing the bounding box of the catalogue | [
"Returns",
"a",
"polygon",
"containing",
"the",
"bounding",
"box",
"of",
"the",
"catalogue"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/smoothing/smoothed_seismicity.py#L170-L180 | train | 233,028 |
gem/oq-engine | openquake/hmtk/seismicity/smoothing/smoothed_seismicity.py | Grid.make_from_catalogue | def make_from_catalogue(cls, catalogue, spacing, dilate):
'''
Defines the grid on the basis of the catalogue
'''
new = cls()
cat_bbox = get_catalogue_bounding_polygon(catalogue)
if dilate > 0:
cat_bbox = cat_bbox.dilate(dilate)
# Define Grid spacing
new.update({'xmin': np.min(cat_bbox.lons),
'xmax': np.max(cat_bbox.lons),
'xspc': spacing,
'ymin': np.min(cat_bbox.lats),
'ymax': np.max(cat_bbox.lats),
'yspc': spacing,
'zmin': 0.,
'zmax': np.max(catalogue.data['depth']),
'zspc': np.max(catalogue.data['depth'])})
if new['zmin'] == new['zmax'] == new['zspc'] == 0:
new['zmax'] = new['zspc'] = 1
return new | python | def make_from_catalogue(cls, catalogue, spacing, dilate):
'''
Defines the grid on the basis of the catalogue
'''
new = cls()
cat_bbox = get_catalogue_bounding_polygon(catalogue)
if dilate > 0:
cat_bbox = cat_bbox.dilate(dilate)
# Define Grid spacing
new.update({'xmin': np.min(cat_bbox.lons),
'xmax': np.max(cat_bbox.lons),
'xspc': spacing,
'ymin': np.min(cat_bbox.lats),
'ymax': np.max(cat_bbox.lats),
'yspc': spacing,
'zmin': 0.,
'zmax': np.max(catalogue.data['depth']),
'zspc': np.max(catalogue.data['depth'])})
if new['zmin'] == new['zmax'] == new['zspc'] == 0:
new['zmax'] = new['zspc'] = 1
return new | [
"def",
"make_from_catalogue",
"(",
"cls",
",",
"catalogue",
",",
"spacing",
",",
"dilate",
")",
":",
"new",
"=",
"cls",
"(",
")",
"cat_bbox",
"=",
"get_catalogue_bounding_polygon",
"(",
"catalogue",
")",
"if",
"dilate",
">",
"0",
":",
"cat_bbox",
"=",
"cat_bbox",
".",
"dilate",
"(",
"dilate",
")",
"# Define Grid spacing",
"new",
".",
"update",
"(",
"{",
"'xmin'",
":",
"np",
".",
"min",
"(",
"cat_bbox",
".",
"lons",
")",
",",
"'xmax'",
":",
"np",
".",
"max",
"(",
"cat_bbox",
".",
"lons",
")",
",",
"'xspc'",
":",
"spacing",
",",
"'ymin'",
":",
"np",
".",
"min",
"(",
"cat_bbox",
".",
"lats",
")",
",",
"'ymax'",
":",
"np",
".",
"max",
"(",
"cat_bbox",
".",
"lats",
")",
",",
"'yspc'",
":",
"spacing",
",",
"'zmin'",
":",
"0.",
",",
"'zmax'",
":",
"np",
".",
"max",
"(",
"catalogue",
".",
"data",
"[",
"'depth'",
"]",
")",
",",
"'zspc'",
":",
"np",
".",
"max",
"(",
"catalogue",
".",
"data",
"[",
"'depth'",
"]",
")",
"}",
")",
"if",
"new",
"[",
"'zmin'",
"]",
"==",
"new",
"[",
"'zmax'",
"]",
"==",
"new",
"[",
"'zspc'",
"]",
"==",
"0",
":",
"new",
"[",
"'zmax'",
"]",
"=",
"new",
"[",
"'zspc'",
"]",
"=",
"1",
"return",
"new"
] | Defines the grid on the basis of the catalogue | [
"Defines",
"the",
"grid",
"on",
"the",
"basis",
"of",
"the",
"catalogue"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/smoothing/smoothed_seismicity.py#L81-L105 | train | 233,029 |
gem/oq-engine | openquake/hmtk/seismicity/smoothing/smoothed_seismicity.py | SmoothedSeismicity.write_to_csv | def write_to_csv(self, filename):
'''
Exports to simple csv
:param str filename:
Path to file for export
'''
fid = open(filename, 'wt')
# Create header list
header_info = ['Longitude', 'Latitude', 'Depth', 'Observed Count',
'Smoothed Rate', 'b-value']
writer = csv.DictWriter(fid, fieldnames=header_info)
headers = dict((name0, name0) for name0 in header_info)
# Write to file
writer.writerow(headers)
for row in self.data:
# institute crude compression by omitting points with no seismicity
# and taking advantage of the %g format
if row[4] == 0:
continue
row_dict = {'Longitude': '%g' % row[0],
'Latitude': '%g' % row[1],
'Depth': '%g' % row[2],
'Observed Count': '%d' % row[3],
'Smoothed Rate': '%.6g' % row[4],
'b-value': '%g' % self.bval}
writer.writerow(row_dict)
fid.close() | python | def write_to_csv(self, filename):
'''
Exports to simple csv
:param str filename:
Path to file for export
'''
fid = open(filename, 'wt')
# Create header list
header_info = ['Longitude', 'Latitude', 'Depth', 'Observed Count',
'Smoothed Rate', 'b-value']
writer = csv.DictWriter(fid, fieldnames=header_info)
headers = dict((name0, name0) for name0 in header_info)
# Write to file
writer.writerow(headers)
for row in self.data:
# institute crude compression by omitting points with no seismicity
# and taking advantage of the %g format
if row[4] == 0:
continue
row_dict = {'Longitude': '%g' % row[0],
'Latitude': '%g' % row[1],
'Depth': '%g' % row[2],
'Observed Count': '%d' % row[3],
'Smoothed Rate': '%.6g' % row[4],
'b-value': '%g' % self.bval}
writer.writerow(row_dict)
fid.close() | [
"def",
"write_to_csv",
"(",
"self",
",",
"filename",
")",
":",
"fid",
"=",
"open",
"(",
"filename",
",",
"'wt'",
")",
"# Create header list",
"header_info",
"=",
"[",
"'Longitude'",
",",
"'Latitude'",
",",
"'Depth'",
",",
"'Observed Count'",
",",
"'Smoothed Rate'",
",",
"'b-value'",
"]",
"writer",
"=",
"csv",
".",
"DictWriter",
"(",
"fid",
",",
"fieldnames",
"=",
"header_info",
")",
"headers",
"=",
"dict",
"(",
"(",
"name0",
",",
"name0",
")",
"for",
"name0",
"in",
"header_info",
")",
"# Write to file",
"writer",
".",
"writerow",
"(",
"headers",
")",
"for",
"row",
"in",
"self",
".",
"data",
":",
"# institute crude compression by omitting points with no seismicity",
"# and taking advantage of the %g format",
"if",
"row",
"[",
"4",
"]",
"==",
"0",
":",
"continue",
"row_dict",
"=",
"{",
"'Longitude'",
":",
"'%g'",
"%",
"row",
"[",
"0",
"]",
",",
"'Latitude'",
":",
"'%g'",
"%",
"row",
"[",
"1",
"]",
",",
"'Depth'",
":",
"'%g'",
"%",
"row",
"[",
"2",
"]",
",",
"'Observed Count'",
":",
"'%d'",
"%",
"row",
"[",
"3",
"]",
",",
"'Smoothed Rate'",
":",
"'%.6g'",
"%",
"row",
"[",
"4",
"]",
",",
"'b-value'",
":",
"'%g'",
"%",
"self",
".",
"bval",
"}",
"writer",
".",
"writerow",
"(",
"row_dict",
")",
"fid",
".",
"close",
"(",
")"
] | Exports to simple csv
:param str filename:
Path to file for export | [
"Exports",
"to",
"simple",
"csv"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/smoothing/smoothed_seismicity.py#L491-L518 | train | 233,030 |
gem/oq-engine | openquake/commonlib/hazard_writers.py | _validate_hazard_metadata | def _validate_hazard_metadata(md):
"""
Validate metadata `dict` of attributes, which are more or less the same for
hazard curves, hazard maps, and disaggregation histograms.
:param dict md:
`dict` which can contain the following keys:
* statistics
* gsimlt_path
* smlt_path
* imt
* sa_period
* sa_damping
:raises:
:exc:`ValueError` if the metadata is not valid.
"""
if (md.get('statistics') is not None and (
md.get('smlt_path') is not None or
md.get('gsimlt_path') is not None)):
raise ValueError('Cannot specify both `statistics` and logic tree '
'paths')
if md.get('statistics') is not None:
# make sure only valid statistics types are specified
if md.get('statistics') not in ('mean', 'max', 'quantile', 'std'):
raise ValueError('`statistics` must be either `mean`, `max`, or '
'`quantile`')
else:
# must specify both logic tree paths
if md.get('smlt_path') is None or md.get('gsimlt_path') is None:
raise ValueError('Both logic tree paths are required for '
'non-statistical results')
if md.get('statistics') == 'quantile':
if md.get('quantile_value') is None:
raise ValueError('quantile stastics results require a quantile'
' value to be specified')
if not md.get('statistics') == 'quantile':
if md.get('quantile_value') is not None:
raise ValueError('Quantile value must be specified with '
'quantile statistics')
if md.get('imt') == 'SA':
if md.get('sa_period') is None:
raise ValueError('`sa_period` is required for IMT == `SA`')
if md.get('sa_damping') is None:
raise ValueError('`sa_damping` is required for IMT == `SA`') | python | def _validate_hazard_metadata(md):
"""
Validate metadata `dict` of attributes, which are more or less the same for
hazard curves, hazard maps, and disaggregation histograms.
:param dict md:
`dict` which can contain the following keys:
* statistics
* gsimlt_path
* smlt_path
* imt
* sa_period
* sa_damping
:raises:
:exc:`ValueError` if the metadata is not valid.
"""
if (md.get('statistics') is not None and (
md.get('smlt_path') is not None or
md.get('gsimlt_path') is not None)):
raise ValueError('Cannot specify both `statistics` and logic tree '
'paths')
if md.get('statistics') is not None:
# make sure only valid statistics types are specified
if md.get('statistics') not in ('mean', 'max', 'quantile', 'std'):
raise ValueError('`statistics` must be either `mean`, `max`, or '
'`quantile`')
else:
# must specify both logic tree paths
if md.get('smlt_path') is None or md.get('gsimlt_path') is None:
raise ValueError('Both logic tree paths are required for '
'non-statistical results')
if md.get('statistics') == 'quantile':
if md.get('quantile_value') is None:
raise ValueError('quantile stastics results require a quantile'
' value to be specified')
if not md.get('statistics') == 'quantile':
if md.get('quantile_value') is not None:
raise ValueError('Quantile value must be specified with '
'quantile statistics')
if md.get('imt') == 'SA':
if md.get('sa_period') is None:
raise ValueError('`sa_period` is required for IMT == `SA`')
if md.get('sa_damping') is None:
raise ValueError('`sa_damping` is required for IMT == `SA`') | [
"def",
"_validate_hazard_metadata",
"(",
"md",
")",
":",
"if",
"(",
"md",
".",
"get",
"(",
"'statistics'",
")",
"is",
"not",
"None",
"and",
"(",
"md",
".",
"get",
"(",
"'smlt_path'",
")",
"is",
"not",
"None",
"or",
"md",
".",
"get",
"(",
"'gsimlt_path'",
")",
"is",
"not",
"None",
")",
")",
":",
"raise",
"ValueError",
"(",
"'Cannot specify both `statistics` and logic tree '",
"'paths'",
")",
"if",
"md",
".",
"get",
"(",
"'statistics'",
")",
"is",
"not",
"None",
":",
"# make sure only valid statistics types are specified",
"if",
"md",
".",
"get",
"(",
"'statistics'",
")",
"not",
"in",
"(",
"'mean'",
",",
"'max'",
",",
"'quantile'",
",",
"'std'",
")",
":",
"raise",
"ValueError",
"(",
"'`statistics` must be either `mean`, `max`, or '",
"'`quantile`'",
")",
"else",
":",
"# must specify both logic tree paths",
"if",
"md",
".",
"get",
"(",
"'smlt_path'",
")",
"is",
"None",
"or",
"md",
".",
"get",
"(",
"'gsimlt_path'",
")",
"is",
"None",
":",
"raise",
"ValueError",
"(",
"'Both logic tree paths are required for '",
"'non-statistical results'",
")",
"if",
"md",
".",
"get",
"(",
"'statistics'",
")",
"==",
"'quantile'",
":",
"if",
"md",
".",
"get",
"(",
"'quantile_value'",
")",
"is",
"None",
":",
"raise",
"ValueError",
"(",
"'quantile stastics results require a quantile'",
"' value to be specified'",
")",
"if",
"not",
"md",
".",
"get",
"(",
"'statistics'",
")",
"==",
"'quantile'",
":",
"if",
"md",
".",
"get",
"(",
"'quantile_value'",
")",
"is",
"not",
"None",
":",
"raise",
"ValueError",
"(",
"'Quantile value must be specified with '",
"'quantile statistics'",
")",
"if",
"md",
".",
"get",
"(",
"'imt'",
")",
"==",
"'SA'",
":",
"if",
"md",
".",
"get",
"(",
"'sa_period'",
")",
"is",
"None",
":",
"raise",
"ValueError",
"(",
"'`sa_period` is required for IMT == `SA`'",
")",
"if",
"md",
".",
"get",
"(",
"'sa_damping'",
")",
"is",
"None",
":",
"raise",
"ValueError",
"(",
"'`sa_damping` is required for IMT == `SA`'",
")"
] | Validate metadata `dict` of attributes, which are more or less the same for
hazard curves, hazard maps, and disaggregation histograms.
:param dict md:
`dict` which can contain the following keys:
* statistics
* gsimlt_path
* smlt_path
* imt
* sa_period
* sa_damping
:raises:
:exc:`ValueError` if the metadata is not valid. | [
"Validate",
"metadata",
"dict",
"of",
"attributes",
"which",
"are",
"more",
"or",
"less",
"the",
"same",
"for",
"hazard",
"curves",
"hazard",
"maps",
"and",
"disaggregation",
"histograms",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/hazard_writers.py#L54-L103 | train | 233,031 |
gem/oq-engine | openquake/commonlib/hazard_writers.py | _set_metadata | def _set_metadata(element, metadata, attr_map, transform=str):
"""
Set metadata attributes on a given ``element``.
:param element:
:class:`xml.etree.ElementTree.Element` instance
:param metadata:
Dictionary of metadata items containing attribute data for ``element``.
:param attr_map:
Dictionary mapping of metadata key->attribute name.
:param transform:
A function accepting and returning a single value to be applied to each
attribute value. Defaults to `str`.
"""
for kw, attr in attr_map.items():
value = metadata.get(kw)
if value is not None:
element.set(attr, transform(value)) | python | def _set_metadata(element, metadata, attr_map, transform=str):
"""
Set metadata attributes on a given ``element``.
:param element:
:class:`xml.etree.ElementTree.Element` instance
:param metadata:
Dictionary of metadata items containing attribute data for ``element``.
:param attr_map:
Dictionary mapping of metadata key->attribute name.
:param transform:
A function accepting and returning a single value to be applied to each
attribute value. Defaults to `str`.
"""
for kw, attr in attr_map.items():
value = metadata.get(kw)
if value is not None:
element.set(attr, transform(value)) | [
"def",
"_set_metadata",
"(",
"element",
",",
"metadata",
",",
"attr_map",
",",
"transform",
"=",
"str",
")",
":",
"for",
"kw",
",",
"attr",
"in",
"attr_map",
".",
"items",
"(",
")",
":",
"value",
"=",
"metadata",
".",
"get",
"(",
"kw",
")",
"if",
"value",
"is",
"not",
"None",
":",
"element",
".",
"set",
"(",
"attr",
",",
"transform",
"(",
"value",
")",
")"
] | Set metadata attributes on a given ``element``.
:param element:
:class:`xml.etree.ElementTree.Element` instance
:param metadata:
Dictionary of metadata items containing attribute data for ``element``.
:param attr_map:
Dictionary mapping of metadata key->attribute name.
:param transform:
A function accepting and returning a single value to be applied to each
attribute value. Defaults to `str`. | [
"Set",
"metadata",
"attributes",
"on",
"a",
"given",
"element",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/hazard_writers.py#L106-L123 | train | 233,032 |
gem/oq-engine | openquake/commonlib/hazard_writers.py | HazardCurveXMLWriter.serialize | def serialize(self, data):
"""
Write a sequence of hazard curves to the specified file.
:param data:
Iterable of hazard curve data. Each datum must be an object with
the following attributes:
* poes: A list of probability of exceedence values (floats).
* location: An object representing the location of the curve; must
have `x` and `y` to represent lon and lat, respectively.
"""
with open(self.dest, 'wb') as fh:
root = et.Element('nrml')
self.add_hazard_curves(root, self.metadata, data)
nrml.write(list(root), fh) | python | def serialize(self, data):
"""
Write a sequence of hazard curves to the specified file.
:param data:
Iterable of hazard curve data. Each datum must be an object with
the following attributes:
* poes: A list of probability of exceedence values (floats).
* location: An object representing the location of the curve; must
have `x` and `y` to represent lon and lat, respectively.
"""
with open(self.dest, 'wb') as fh:
root = et.Element('nrml')
self.add_hazard_curves(root, self.metadata, data)
nrml.write(list(root), fh) | [
"def",
"serialize",
"(",
"self",
",",
"data",
")",
":",
"with",
"open",
"(",
"self",
".",
"dest",
",",
"'wb'",
")",
"as",
"fh",
":",
"root",
"=",
"et",
".",
"Element",
"(",
"'nrml'",
")",
"self",
".",
"add_hazard_curves",
"(",
"root",
",",
"self",
".",
"metadata",
",",
"data",
")",
"nrml",
".",
"write",
"(",
"list",
"(",
"root",
")",
",",
"fh",
")"
] | Write a sequence of hazard curves to the specified file.
:param data:
Iterable of hazard curve data. Each datum must be an object with
the following attributes:
* poes: A list of probability of exceedence values (floats).
* location: An object representing the location of the curve; must
have `x` and `y` to represent lon and lat, respectively. | [
"Write",
"a",
"sequence",
"of",
"hazard",
"curves",
"to",
"the",
"specified",
"file",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/hazard_writers.py#L177-L192 | train | 233,033 |
gem/oq-engine | openquake/commonlib/hazard_writers.py | HazardCurveXMLWriter.add_hazard_curves | def add_hazard_curves(self, root, metadata, data):
"""
Add hazard curves stored into `data` as child of the `root`
element with `metadata`. See the documentation of the method
`serialize` and the constructor for a description of `data`
and `metadata`, respectively.
"""
hazard_curves = et.SubElement(root, 'hazardCurves')
_set_metadata(hazard_curves, metadata, _ATTR_MAP)
imls_elem = et.SubElement(hazard_curves, 'IMLs')
imls_elem.text = ' '.join(map(scientificformat, metadata['imls']))
gml_ns = nrml.SERIALIZE_NS_MAP['gml']
for hc in data:
hc_elem = et.SubElement(hazard_curves, 'hazardCurve')
gml_point = et.SubElement(hc_elem, '{%s}Point' % gml_ns)
gml_pos = et.SubElement(gml_point, '{%s}pos' % gml_ns)
gml_pos.text = '%s %s' % (hc.location.x, hc.location.y)
poes_elem = et.SubElement(hc_elem, 'poEs')
poes_elem.text = ' '.join(map(scientificformat, hc.poes)) | python | def add_hazard_curves(self, root, metadata, data):
"""
Add hazard curves stored into `data` as child of the `root`
element with `metadata`. See the documentation of the method
`serialize` and the constructor for a description of `data`
and `metadata`, respectively.
"""
hazard_curves = et.SubElement(root, 'hazardCurves')
_set_metadata(hazard_curves, metadata, _ATTR_MAP)
imls_elem = et.SubElement(hazard_curves, 'IMLs')
imls_elem.text = ' '.join(map(scientificformat, metadata['imls']))
gml_ns = nrml.SERIALIZE_NS_MAP['gml']
for hc in data:
hc_elem = et.SubElement(hazard_curves, 'hazardCurve')
gml_point = et.SubElement(hc_elem, '{%s}Point' % gml_ns)
gml_pos = et.SubElement(gml_point, '{%s}pos' % gml_ns)
gml_pos.text = '%s %s' % (hc.location.x, hc.location.y)
poes_elem = et.SubElement(hc_elem, 'poEs')
poes_elem.text = ' '.join(map(scientificformat, hc.poes)) | [
"def",
"add_hazard_curves",
"(",
"self",
",",
"root",
",",
"metadata",
",",
"data",
")",
":",
"hazard_curves",
"=",
"et",
".",
"SubElement",
"(",
"root",
",",
"'hazardCurves'",
")",
"_set_metadata",
"(",
"hazard_curves",
",",
"metadata",
",",
"_ATTR_MAP",
")",
"imls_elem",
"=",
"et",
".",
"SubElement",
"(",
"hazard_curves",
",",
"'IMLs'",
")",
"imls_elem",
".",
"text",
"=",
"' '",
".",
"join",
"(",
"map",
"(",
"scientificformat",
",",
"metadata",
"[",
"'imls'",
"]",
")",
")",
"gml_ns",
"=",
"nrml",
".",
"SERIALIZE_NS_MAP",
"[",
"'gml'",
"]",
"for",
"hc",
"in",
"data",
":",
"hc_elem",
"=",
"et",
".",
"SubElement",
"(",
"hazard_curves",
",",
"'hazardCurve'",
")",
"gml_point",
"=",
"et",
".",
"SubElement",
"(",
"hc_elem",
",",
"'{%s}Point'",
"%",
"gml_ns",
")",
"gml_pos",
"=",
"et",
".",
"SubElement",
"(",
"gml_point",
",",
"'{%s}pos'",
"%",
"gml_ns",
")",
"gml_pos",
".",
"text",
"=",
"'%s %s'",
"%",
"(",
"hc",
".",
"location",
".",
"x",
",",
"hc",
".",
"location",
".",
"y",
")",
"poes_elem",
"=",
"et",
".",
"SubElement",
"(",
"hc_elem",
",",
"'poEs'",
")",
"poes_elem",
".",
"text",
"=",
"' '",
".",
"join",
"(",
"map",
"(",
"scientificformat",
",",
"hc",
".",
"poes",
")",
")"
] | Add hazard curves stored into `data` as child of the `root`
element with `metadata`. See the documentation of the method
`serialize` and the constructor for a description of `data`
and `metadata`, respectively. | [
"Add",
"hazard",
"curves",
"stored",
"into",
"data",
"as",
"child",
"of",
"the",
"root",
"element",
"with",
"metadata",
".",
"See",
"the",
"documentation",
"of",
"the",
"method",
"serialize",
"and",
"the",
"constructor",
"for",
"a",
"description",
"of",
"data",
"and",
"metadata",
"respectively",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/hazard_writers.py#L194-L215 | train | 233,034 |
gem/oq-engine | openquake/commonlib/hazard_writers.py | EventBasedGMFXMLWriter.serialize | def serialize(self, data, fmt='%10.7E'):
"""
Serialize a collection of ground motion fields to XML.
:param data:
An iterable of "GMF set" objects.
Each "GMF set" object should:
* have an `investigation_time` attribute
* have an `stochastic_event_set_id` attribute
* be iterable, yielding a sequence of "GMF" objects
Each "GMF" object should:
* have an `imt` attribute
* have an `sa_period` attribute (only if `imt` is 'SA')
* have an `sa_damping` attribute (only if `imt` is 'SA')
* have a `event_id` attribute (to indicate which rupture
contributed to this gmf)
* be iterable, yielding a sequence of "GMF node" objects
Each "GMF node" object should have:
* a `gmv` attribute (to indicate the ground motion value
* `lon` and `lat` attributes (to indicate the geographical location
of the ground motion field)
"""
gmf_set_nodes = []
for gmf_set in data:
gmf_set_node = Node('gmfSet')
if gmf_set.investigation_time:
gmf_set_node['investigationTime'] = str(
gmf_set.investigation_time)
gmf_set_node['stochasticEventSetId'] = str(
gmf_set.stochastic_event_set_id)
gmf_set_node.nodes = gen_gmfs(gmf_set)
gmf_set_nodes.append(gmf_set_node)
gmf_container = Node('gmfCollection')
gmf_container[SM_TREE_PATH] = self.sm_lt_path
gmf_container[GSIM_TREE_PATH] = self.gsim_lt_path
gmf_container.nodes = gmf_set_nodes
with open(self.dest, 'wb') as dest:
nrml.write([gmf_container], dest, fmt) | python | def serialize(self, data, fmt='%10.7E'):
"""
Serialize a collection of ground motion fields to XML.
:param data:
An iterable of "GMF set" objects.
Each "GMF set" object should:
* have an `investigation_time` attribute
* have an `stochastic_event_set_id` attribute
* be iterable, yielding a sequence of "GMF" objects
Each "GMF" object should:
* have an `imt` attribute
* have an `sa_period` attribute (only if `imt` is 'SA')
* have an `sa_damping` attribute (only if `imt` is 'SA')
* have a `event_id` attribute (to indicate which rupture
contributed to this gmf)
* be iterable, yielding a sequence of "GMF node" objects
Each "GMF node" object should have:
* a `gmv` attribute (to indicate the ground motion value
* `lon` and `lat` attributes (to indicate the geographical location
of the ground motion field)
"""
gmf_set_nodes = []
for gmf_set in data:
gmf_set_node = Node('gmfSet')
if gmf_set.investigation_time:
gmf_set_node['investigationTime'] = str(
gmf_set.investigation_time)
gmf_set_node['stochasticEventSetId'] = str(
gmf_set.stochastic_event_set_id)
gmf_set_node.nodes = gen_gmfs(gmf_set)
gmf_set_nodes.append(gmf_set_node)
gmf_container = Node('gmfCollection')
gmf_container[SM_TREE_PATH] = self.sm_lt_path
gmf_container[GSIM_TREE_PATH] = self.gsim_lt_path
gmf_container.nodes = gmf_set_nodes
with open(self.dest, 'wb') as dest:
nrml.write([gmf_container], dest, fmt) | [
"def",
"serialize",
"(",
"self",
",",
"data",
",",
"fmt",
"=",
"'%10.7E'",
")",
":",
"gmf_set_nodes",
"=",
"[",
"]",
"for",
"gmf_set",
"in",
"data",
":",
"gmf_set_node",
"=",
"Node",
"(",
"'gmfSet'",
")",
"if",
"gmf_set",
".",
"investigation_time",
":",
"gmf_set_node",
"[",
"'investigationTime'",
"]",
"=",
"str",
"(",
"gmf_set",
".",
"investigation_time",
")",
"gmf_set_node",
"[",
"'stochasticEventSetId'",
"]",
"=",
"str",
"(",
"gmf_set",
".",
"stochastic_event_set_id",
")",
"gmf_set_node",
".",
"nodes",
"=",
"gen_gmfs",
"(",
"gmf_set",
")",
"gmf_set_nodes",
".",
"append",
"(",
"gmf_set_node",
")",
"gmf_container",
"=",
"Node",
"(",
"'gmfCollection'",
")",
"gmf_container",
"[",
"SM_TREE_PATH",
"]",
"=",
"self",
".",
"sm_lt_path",
"gmf_container",
"[",
"GSIM_TREE_PATH",
"]",
"=",
"self",
".",
"gsim_lt_path",
"gmf_container",
".",
"nodes",
"=",
"gmf_set_nodes",
"with",
"open",
"(",
"self",
".",
"dest",
",",
"'wb'",
")",
"as",
"dest",
":",
"nrml",
".",
"write",
"(",
"[",
"gmf_container",
"]",
",",
"dest",
",",
"fmt",
")"
] | Serialize a collection of ground motion fields to XML.
:param data:
An iterable of "GMF set" objects.
Each "GMF set" object should:
* have an `investigation_time` attribute
* have an `stochastic_event_set_id` attribute
* be iterable, yielding a sequence of "GMF" objects
Each "GMF" object should:
* have an `imt` attribute
* have an `sa_period` attribute (only if `imt` is 'SA')
* have an `sa_damping` attribute (only if `imt` is 'SA')
* have a `event_id` attribute (to indicate which rupture
contributed to this gmf)
* be iterable, yielding a sequence of "GMF node" objects
Each "GMF node" object should have:
* a `gmv` attribute (to indicate the ground motion value
* `lon` and `lat` attributes (to indicate the geographical location
of the ground motion field) | [
"Serialize",
"a",
"collection",
"of",
"ground",
"motion",
"fields",
"to",
"XML",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/hazard_writers.py#L259-L303 | train | 233,035 |
gem/oq-engine | openquake/commonlib/hazard_writers.py | SESXMLWriter.serialize | def serialize(self, data, investigation_time):
"""
Serialize a collection of stochastic event sets to XML.
:param data:
A dictionary src_group_id -> list of
:class:`openquake.commonlib.calc.Rupture` objects.
Each Rupture should have the following attributes:
* `rupid`
* `events_by_ses`
* `magnitude`
* `strike`
* `dip`
* `rake`
* `tectonic_region_type`
* `is_from_fault_source` (a `bool`)
* `is_multi_surface` (a `bool`)
* `lons`
* `lats`
* `depths`
If `is_from_fault_source` is `True`, the rupture originated from a
simple or complex fault sources. In this case, `lons`, `lats`, and
`depths` should all be 2D arrays (of uniform shape). These
coordinate triples represent nodes of the rupture mesh.
If `is_from_fault_source` is `False`, the rupture originated from a
point or area source. In this case, the rupture is represented by a
quadrilateral planar surface. This planar surface is defined by 3D
vertices. In this case, the rupture should have the following
attributes:
* `top_left_corner`
* `top_right_corner`
* `bottom_right_corner`
* `bottom_left_corner`
Each of these should be a triple of `lon`, `lat`, `depth`.
If `is_multi_surface` is `True`, the rupture originated from a
multi-surface source. In this case, `lons`, `lats`, and `depths`
should have uniform length. The length should be a multiple of 4,
where each segment of 4 represents the corner points of a planar
surface in the following order:
* top left
* top right
* bottom left
* bottom right
Each of these should be a triple of `lon`, `lat`, `depth`.
:param investigation_time:
Investigation time parameter specified in the job.ini
"""
with open(self.dest, 'wb') as fh:
root = et.Element('nrml')
ses_container = et.SubElement(root, 'ruptureCollection')
ses_container.set('investigationTime', str(investigation_time))
for grp_id in sorted(data):
attrs = dict(
id=grp_id,
tectonicRegion=data[grp_id][0].tectonic_region_type)
sg = et.SubElement(ses_container, 'ruptureGroup', attrs)
for rupture in data[grp_id]:
rupture_to_element(rupture, sg)
nrml.write(list(root), fh) | python | def serialize(self, data, investigation_time):
"""
Serialize a collection of stochastic event sets to XML.
:param data:
A dictionary src_group_id -> list of
:class:`openquake.commonlib.calc.Rupture` objects.
Each Rupture should have the following attributes:
* `rupid`
* `events_by_ses`
* `magnitude`
* `strike`
* `dip`
* `rake`
* `tectonic_region_type`
* `is_from_fault_source` (a `bool`)
* `is_multi_surface` (a `bool`)
* `lons`
* `lats`
* `depths`
If `is_from_fault_source` is `True`, the rupture originated from a
simple or complex fault sources. In this case, `lons`, `lats`, and
`depths` should all be 2D arrays (of uniform shape). These
coordinate triples represent nodes of the rupture mesh.
If `is_from_fault_source` is `False`, the rupture originated from a
point or area source. In this case, the rupture is represented by a
quadrilateral planar surface. This planar surface is defined by 3D
vertices. In this case, the rupture should have the following
attributes:
* `top_left_corner`
* `top_right_corner`
* `bottom_right_corner`
* `bottom_left_corner`
Each of these should be a triple of `lon`, `lat`, `depth`.
If `is_multi_surface` is `True`, the rupture originated from a
multi-surface source. In this case, `lons`, `lats`, and `depths`
should have uniform length. The length should be a multiple of 4,
where each segment of 4 represents the corner points of a planar
surface in the following order:
* top left
* top right
* bottom left
* bottom right
Each of these should be a triple of `lon`, `lat`, `depth`.
:param investigation_time:
Investigation time parameter specified in the job.ini
"""
with open(self.dest, 'wb') as fh:
root = et.Element('nrml')
ses_container = et.SubElement(root, 'ruptureCollection')
ses_container.set('investigationTime', str(investigation_time))
for grp_id in sorted(data):
attrs = dict(
id=grp_id,
tectonicRegion=data[grp_id][0].tectonic_region_type)
sg = et.SubElement(ses_container, 'ruptureGroup', attrs)
for rupture in data[grp_id]:
rupture_to_element(rupture, sg)
nrml.write(list(root), fh) | [
"def",
"serialize",
"(",
"self",
",",
"data",
",",
"investigation_time",
")",
":",
"with",
"open",
"(",
"self",
".",
"dest",
",",
"'wb'",
")",
"as",
"fh",
":",
"root",
"=",
"et",
".",
"Element",
"(",
"'nrml'",
")",
"ses_container",
"=",
"et",
".",
"SubElement",
"(",
"root",
",",
"'ruptureCollection'",
")",
"ses_container",
".",
"set",
"(",
"'investigationTime'",
",",
"str",
"(",
"investigation_time",
")",
")",
"for",
"grp_id",
"in",
"sorted",
"(",
"data",
")",
":",
"attrs",
"=",
"dict",
"(",
"id",
"=",
"grp_id",
",",
"tectonicRegion",
"=",
"data",
"[",
"grp_id",
"]",
"[",
"0",
"]",
".",
"tectonic_region_type",
")",
"sg",
"=",
"et",
".",
"SubElement",
"(",
"ses_container",
",",
"'ruptureGroup'",
",",
"attrs",
")",
"for",
"rupture",
"in",
"data",
"[",
"grp_id",
"]",
":",
"rupture_to_element",
"(",
"rupture",
",",
"sg",
")",
"nrml",
".",
"write",
"(",
"list",
"(",
"root",
")",
",",
"fh",
")"
] | Serialize a collection of stochastic event sets to XML.
:param data:
A dictionary src_group_id -> list of
:class:`openquake.commonlib.calc.Rupture` objects.
Each Rupture should have the following attributes:
* `rupid`
* `events_by_ses`
* `magnitude`
* `strike`
* `dip`
* `rake`
* `tectonic_region_type`
* `is_from_fault_source` (a `bool`)
* `is_multi_surface` (a `bool`)
* `lons`
* `lats`
* `depths`
If `is_from_fault_source` is `True`, the rupture originated from a
simple or complex fault sources. In this case, `lons`, `lats`, and
`depths` should all be 2D arrays (of uniform shape). These
coordinate triples represent nodes of the rupture mesh.
If `is_from_fault_source` is `False`, the rupture originated from a
point or area source. In this case, the rupture is represented by a
quadrilateral planar surface. This planar surface is defined by 3D
vertices. In this case, the rupture should have the following
attributes:
* `top_left_corner`
* `top_right_corner`
* `bottom_right_corner`
* `bottom_left_corner`
Each of these should be a triple of `lon`, `lat`, `depth`.
If `is_multi_surface` is `True`, the rupture originated from a
multi-surface source. In this case, `lons`, `lats`, and `depths`
should have uniform length. The length should be a multiple of 4,
where each segment of 4 represents the corner points of a planar
surface in the following order:
* top left
* top right
* bottom left
* bottom right
Each of these should be a triple of `lon`, `lat`, `depth`.
:param investigation_time:
Investigation time parameter specified in the job.ini | [
"Serialize",
"a",
"collection",
"of",
"stochastic",
"event",
"sets",
"to",
"XML",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/hazard_writers.py#L440-L507 | train | 233,036 |
gem/oq-engine | openquake/commonlib/hazard_writers.py | HazardMapXMLWriter.serialize | def serialize(self, data):
"""
Serialize hazard map data to XML.
See :meth:`HazardMapWriter.serialize` for details about the expected
input.
"""
with open(self.dest, 'wb') as fh:
root = et.Element('nrml')
hazard_map = et.SubElement(root, 'hazardMap')
_set_metadata(hazard_map, self.metadata, _ATTR_MAP)
for lon, lat, iml in data:
node = et.SubElement(hazard_map, 'node')
node.set('lon', str(lon))
node.set('lat', str(lat))
node.set('iml', str(iml))
nrml.write(list(root), fh) | python | def serialize(self, data):
"""
Serialize hazard map data to XML.
See :meth:`HazardMapWriter.serialize` for details about the expected
input.
"""
with open(self.dest, 'wb') as fh:
root = et.Element('nrml')
hazard_map = et.SubElement(root, 'hazardMap')
_set_metadata(hazard_map, self.metadata, _ATTR_MAP)
for lon, lat, iml in data:
node = et.SubElement(hazard_map, 'node')
node.set('lon', str(lon))
node.set('lat', str(lat))
node.set('iml', str(iml))
nrml.write(list(root), fh) | [
"def",
"serialize",
"(",
"self",
",",
"data",
")",
":",
"with",
"open",
"(",
"self",
".",
"dest",
",",
"'wb'",
")",
"as",
"fh",
":",
"root",
"=",
"et",
".",
"Element",
"(",
"'nrml'",
")",
"hazard_map",
"=",
"et",
".",
"SubElement",
"(",
"root",
",",
"'hazardMap'",
")",
"_set_metadata",
"(",
"hazard_map",
",",
"self",
".",
"metadata",
",",
"_ATTR_MAP",
")",
"for",
"lon",
",",
"lat",
",",
"iml",
"in",
"data",
":",
"node",
"=",
"et",
".",
"SubElement",
"(",
"hazard_map",
",",
"'node'",
")",
"node",
".",
"set",
"(",
"'lon'",
",",
"str",
"(",
"lon",
")",
")",
"node",
".",
"set",
"(",
"'lat'",
",",
"str",
"(",
"lat",
")",
")",
"node",
".",
"set",
"(",
"'iml'",
",",
"str",
"(",
"iml",
")",
")",
"nrml",
".",
"write",
"(",
"list",
"(",
"root",
")",
",",
"fh",
")"
] | Serialize hazard map data to XML.
See :meth:`HazardMapWriter.serialize` for details about the expected
input. | [
"Serialize",
"hazard",
"map",
"data",
"to",
"XML",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/hazard_writers.py#L560-L578 | train | 233,037 |
gem/oq-engine | openquake/commonlib/hazard_writers.py | UHSXMLWriter.serialize | def serialize(self, data):
"""
Write a sequence of uniform hazard spectra to the specified file.
:param data:
Iterable of UHS data. Each datum must be an object with the
following attributes:
* imls: A sequence of Intensity Measure Levels
* location: An object representing the location of the curve; must
have `x` and `y` to represent lon and lat, respectively.
"""
gml_ns = nrml.SERIALIZE_NS_MAP['gml']
with open(self.dest, 'wb') as fh:
root = et.Element('nrml')
uh_spectra = et.SubElement(root, 'uniformHazardSpectra')
_set_metadata(uh_spectra, self.metadata, _ATTR_MAP)
periods_elem = et.SubElement(uh_spectra, 'periods')
periods_elem.text = ' '.join([str(x)
for x in self.metadata['periods']])
for uhs in data:
uhs_elem = et.SubElement(uh_spectra, 'uhs')
gml_point = et.SubElement(uhs_elem, '{%s}Point' % gml_ns)
gml_pos = et.SubElement(gml_point, '{%s}pos' % gml_ns)
gml_pos.text = '%s %s' % (uhs.location.x, uhs.location.y)
imls_elem = et.SubElement(uhs_elem, 'IMLs')
imls_elem.text = ' '.join(['%10.7E' % x for x in uhs.imls])
nrml.write(list(root), fh) | python | def serialize(self, data):
"""
Write a sequence of uniform hazard spectra to the specified file.
:param data:
Iterable of UHS data. Each datum must be an object with the
following attributes:
* imls: A sequence of Intensity Measure Levels
* location: An object representing the location of the curve; must
have `x` and `y` to represent lon and lat, respectively.
"""
gml_ns = nrml.SERIALIZE_NS_MAP['gml']
with open(self.dest, 'wb') as fh:
root = et.Element('nrml')
uh_spectra = et.SubElement(root, 'uniformHazardSpectra')
_set_metadata(uh_spectra, self.metadata, _ATTR_MAP)
periods_elem = et.SubElement(uh_spectra, 'periods')
periods_elem.text = ' '.join([str(x)
for x in self.metadata['periods']])
for uhs in data:
uhs_elem = et.SubElement(uh_spectra, 'uhs')
gml_point = et.SubElement(uhs_elem, '{%s}Point' % gml_ns)
gml_pos = et.SubElement(gml_point, '{%s}pos' % gml_ns)
gml_pos.text = '%s %s' % (uhs.location.x, uhs.location.y)
imls_elem = et.SubElement(uhs_elem, 'IMLs')
imls_elem.text = ' '.join(['%10.7E' % x for x in uhs.imls])
nrml.write(list(root), fh) | [
"def",
"serialize",
"(",
"self",
",",
"data",
")",
":",
"gml_ns",
"=",
"nrml",
".",
"SERIALIZE_NS_MAP",
"[",
"'gml'",
"]",
"with",
"open",
"(",
"self",
".",
"dest",
",",
"'wb'",
")",
"as",
"fh",
":",
"root",
"=",
"et",
".",
"Element",
"(",
"'nrml'",
")",
"uh_spectra",
"=",
"et",
".",
"SubElement",
"(",
"root",
",",
"'uniformHazardSpectra'",
")",
"_set_metadata",
"(",
"uh_spectra",
",",
"self",
".",
"metadata",
",",
"_ATTR_MAP",
")",
"periods_elem",
"=",
"et",
".",
"SubElement",
"(",
"uh_spectra",
",",
"'periods'",
")",
"periods_elem",
".",
"text",
"=",
"' '",
".",
"join",
"(",
"[",
"str",
"(",
"x",
")",
"for",
"x",
"in",
"self",
".",
"metadata",
"[",
"'periods'",
"]",
"]",
")",
"for",
"uhs",
"in",
"data",
":",
"uhs_elem",
"=",
"et",
".",
"SubElement",
"(",
"uh_spectra",
",",
"'uhs'",
")",
"gml_point",
"=",
"et",
".",
"SubElement",
"(",
"uhs_elem",
",",
"'{%s}Point'",
"%",
"gml_ns",
")",
"gml_pos",
"=",
"et",
".",
"SubElement",
"(",
"gml_point",
",",
"'{%s}pos'",
"%",
"gml_ns",
")",
"gml_pos",
".",
"text",
"=",
"'%s %s'",
"%",
"(",
"uhs",
".",
"location",
".",
"x",
",",
"uhs",
".",
"location",
".",
"y",
")",
"imls_elem",
"=",
"et",
".",
"SubElement",
"(",
"uhs_elem",
",",
"'IMLs'",
")",
"imls_elem",
".",
"text",
"=",
"' '",
".",
"join",
"(",
"[",
"'%10.7E'",
"%",
"x",
"for",
"x",
"in",
"uhs",
".",
"imls",
"]",
")",
"nrml",
".",
"write",
"(",
"list",
"(",
"root",
")",
",",
"fh",
")"
] | Write a sequence of uniform hazard spectra to the specified file.
:param data:
Iterable of UHS data. Each datum must be an object with the
following attributes:
* imls: A sequence of Intensity Measure Levels
* location: An object representing the location of the curve; must
have `x` and `y` to represent lon and lat, respectively. | [
"Write",
"a",
"sequence",
"of",
"uniform",
"hazard",
"spectra",
"to",
"the",
"specified",
"file",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/hazard_writers.py#L728-L761 | train | 233,038 |
gem/oq-engine | openquake/hmtk/seismicity/max_magnitude/kijko_sellevol_bayes.py | check_config | def check_config(config, data):
'''Check config file inputs
:param dict config:
Configuration settings for the function
'''
essential_keys = ['input_mmin', 'b-value', 'sigma-b']
for key in essential_keys:
if not key in config.keys():
raise ValueError('For KijkoSellevolBayes the key %s needs to '
'be set in the configuation' % key)
if 'tolerance' not in config.keys() or not config['tolerance']:
config['tolerance'] = 1E-5
if not config.get('maximum_iterations', False):
config['maximum_iterations'] = 1000
if config['input_mmin'] < np.min(data['magnitude']):
config['input_mmin'] = np.min(data['magnitude'])
if fabs(config['sigma-b'] < 1E-15):
raise ValueError('Sigma-b must be greater than zero!')
return config | python | def check_config(config, data):
'''Check config file inputs
:param dict config:
Configuration settings for the function
'''
essential_keys = ['input_mmin', 'b-value', 'sigma-b']
for key in essential_keys:
if not key in config.keys():
raise ValueError('For KijkoSellevolBayes the key %s needs to '
'be set in the configuation' % key)
if 'tolerance' not in config.keys() or not config['tolerance']:
config['tolerance'] = 1E-5
if not config.get('maximum_iterations', False):
config['maximum_iterations'] = 1000
if config['input_mmin'] < np.min(data['magnitude']):
config['input_mmin'] = np.min(data['magnitude'])
if fabs(config['sigma-b'] < 1E-15):
raise ValueError('Sigma-b must be greater than zero!')
return config | [
"def",
"check_config",
"(",
"config",
",",
"data",
")",
":",
"essential_keys",
"=",
"[",
"'input_mmin'",
",",
"'b-value'",
",",
"'sigma-b'",
"]",
"for",
"key",
"in",
"essential_keys",
":",
"if",
"not",
"key",
"in",
"config",
".",
"keys",
"(",
")",
":",
"raise",
"ValueError",
"(",
"'For KijkoSellevolBayes the key %s needs to '",
"'be set in the configuation'",
"%",
"key",
")",
"if",
"'tolerance'",
"not",
"in",
"config",
".",
"keys",
"(",
")",
"or",
"not",
"config",
"[",
"'tolerance'",
"]",
":",
"config",
"[",
"'tolerance'",
"]",
"=",
"1E-5",
"if",
"not",
"config",
".",
"get",
"(",
"'maximum_iterations'",
",",
"False",
")",
":",
"config",
"[",
"'maximum_iterations'",
"]",
"=",
"1000",
"if",
"config",
"[",
"'input_mmin'",
"]",
"<",
"np",
".",
"min",
"(",
"data",
"[",
"'magnitude'",
"]",
")",
":",
"config",
"[",
"'input_mmin'",
"]",
"=",
"np",
".",
"min",
"(",
"data",
"[",
"'magnitude'",
"]",
")",
"if",
"fabs",
"(",
"config",
"[",
"'sigma-b'",
"]",
"<",
"1E-15",
")",
":",
"raise",
"ValueError",
"(",
"'Sigma-b must be greater than zero!'",
")",
"return",
"config"
] | Check config file inputs
:param dict config:
Configuration settings for the function | [
"Check",
"config",
"file",
"inputs"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/max_magnitude/kijko_sellevol_bayes.py#L60-L84 | train | 233,039 |
gem/oq-engine | openquake/hazardlib/gsim/toro_1997.py | ToroEtAl1997MblgNSHMP2008._compute_mean | def _compute_mean(self, C, mag, rjb):
"""
Compute ground motion mean value.
"""
# line 1686 in hazgridXnga2.f
ffc = self._compute_finite_fault_correction(mag)
d = np.sqrt(rjb ** 2 + (C['c7'] ** 2) * (ffc ** 2))
# lines 1663, 1694-1696 in hazgridXnga2.f
mean = (
C['c1'] + C['c2'] * (mag - 6.) +
C['c3'] * ((mag - 6.) ** 2) -
C['c4'] * np.log(d) - C['c6'] * d
)
factor = np.log(rjb / 100.)
idx = factor > 0
mean[idx] -= (C['c5'] - C['c4']) * factor[idx]
return mean | python | def _compute_mean(self, C, mag, rjb):
"""
Compute ground motion mean value.
"""
# line 1686 in hazgridXnga2.f
ffc = self._compute_finite_fault_correction(mag)
d = np.sqrt(rjb ** 2 + (C['c7'] ** 2) * (ffc ** 2))
# lines 1663, 1694-1696 in hazgridXnga2.f
mean = (
C['c1'] + C['c2'] * (mag - 6.) +
C['c3'] * ((mag - 6.) ** 2) -
C['c4'] * np.log(d) - C['c6'] * d
)
factor = np.log(rjb / 100.)
idx = factor > 0
mean[idx] -= (C['c5'] - C['c4']) * factor[idx]
return mean | [
"def",
"_compute_mean",
"(",
"self",
",",
"C",
",",
"mag",
",",
"rjb",
")",
":",
"# line 1686 in hazgridXnga2.f",
"ffc",
"=",
"self",
".",
"_compute_finite_fault_correction",
"(",
"mag",
")",
"d",
"=",
"np",
".",
"sqrt",
"(",
"rjb",
"**",
"2",
"+",
"(",
"C",
"[",
"'c7'",
"]",
"**",
"2",
")",
"*",
"(",
"ffc",
"**",
"2",
")",
")",
"# lines 1663, 1694-1696 in hazgridXnga2.f",
"mean",
"=",
"(",
"C",
"[",
"'c1'",
"]",
"+",
"C",
"[",
"'c2'",
"]",
"*",
"(",
"mag",
"-",
"6.",
")",
"+",
"C",
"[",
"'c3'",
"]",
"*",
"(",
"(",
"mag",
"-",
"6.",
")",
"**",
"2",
")",
"-",
"C",
"[",
"'c4'",
"]",
"*",
"np",
".",
"log",
"(",
"d",
")",
"-",
"C",
"[",
"'c6'",
"]",
"*",
"d",
")",
"factor",
"=",
"np",
".",
"log",
"(",
"rjb",
"/",
"100.",
")",
"idx",
"=",
"factor",
">",
"0",
"mean",
"[",
"idx",
"]",
"-=",
"(",
"C",
"[",
"'c5'",
"]",
"-",
"C",
"[",
"'c4'",
"]",
")",
"*",
"factor",
"[",
"idx",
"]",
"return",
"mean"
] | Compute ground motion mean value. | [
"Compute",
"ground",
"motion",
"mean",
"value",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/toro_1997.py#L110-L129 | train | 233,040 |
gem/oq-engine | openquake/hazardlib/gsim/toro_1997.py | ToroEtAl1997MblgNSHMP2008._compute_finite_fault_correction | def _compute_finite_fault_correction(self, mag):
"""
Compute finite fault correction term as geometric mean of correction
terms obtained from Mw values calculated with Johnston 1996 and
Atkinson and Boore 1987 conversion equations.
Implement equations as in lines 1653 - 1658 in hazgridXnga2.f
"""
mw_j96 = mblg_to_mw_johnston_96(mag)
mw_ab87 = mblg_to_mw_atkinson_boore_87(mag)
t1 = np.exp(-1.25 + 0.227 * mw_j96)
t2 = np.exp(-1.25 + 0.227 * mw_ab87)
return np.sqrt(t1 * t2) | python | def _compute_finite_fault_correction(self, mag):
"""
Compute finite fault correction term as geometric mean of correction
terms obtained from Mw values calculated with Johnston 1996 and
Atkinson and Boore 1987 conversion equations.
Implement equations as in lines 1653 - 1658 in hazgridXnga2.f
"""
mw_j96 = mblg_to_mw_johnston_96(mag)
mw_ab87 = mblg_to_mw_atkinson_boore_87(mag)
t1 = np.exp(-1.25 + 0.227 * mw_j96)
t2 = np.exp(-1.25 + 0.227 * mw_ab87)
return np.sqrt(t1 * t2) | [
"def",
"_compute_finite_fault_correction",
"(",
"self",
",",
"mag",
")",
":",
"mw_j96",
"=",
"mblg_to_mw_johnston_96",
"(",
"mag",
")",
"mw_ab87",
"=",
"mblg_to_mw_atkinson_boore_87",
"(",
"mag",
")",
"t1",
"=",
"np",
".",
"exp",
"(",
"-",
"1.25",
"+",
"0.227",
"*",
"mw_j96",
")",
"t2",
"=",
"np",
".",
"exp",
"(",
"-",
"1.25",
"+",
"0.227",
"*",
"mw_ab87",
")",
"return",
"np",
".",
"sqrt",
"(",
"t1",
"*",
"t2",
")"
] | Compute finite fault correction term as geometric mean of correction
terms obtained from Mw values calculated with Johnston 1996 and
Atkinson and Boore 1987 conversion equations.
Implement equations as in lines 1653 - 1658 in hazgridXnga2.f | [
"Compute",
"finite",
"fault",
"correction",
"term",
"as",
"geometric",
"mean",
"of",
"correction",
"terms",
"obtained",
"from",
"Mw",
"values",
"calculated",
"with",
"Johnston",
"1996",
"and",
"Atkinson",
"and",
"Boore",
"1987",
"conversion",
"equations",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/toro_1997.py#L131-L145 | train | 233,041 |
gem/oq-engine | openquake/commands/upgrade_nrml.py | get_vulnerability_functions_04 | def get_vulnerability_functions_04(fname):
"""
Parse the vulnerability model in NRML 0.4 format.
:param fname:
path of the vulnerability file
:returns:
a dictionary imt, taxonomy -> vulnerability function + vset
"""
categories = dict(assetCategory=set(), lossCategory=set(),
vulnerabilitySetID=set())
imts = set()
taxonomies = set()
vf_dict = {} # imt, taxonomy -> vulnerability function
for vset in nrml.read(fname).vulnerabilityModel:
categories['assetCategory'].add(vset['assetCategory'])
categories['lossCategory'].add(vset['lossCategory'])
categories['vulnerabilitySetID'].add(vset['vulnerabilitySetID'])
IML = vset.IML
imt_str = IML['IMT']
imls = ~IML
imts.add(imt_str)
for vfun in vset.getnodes('discreteVulnerability'):
taxonomy = vfun['vulnerabilityFunctionID']
if taxonomy in taxonomies:
raise InvalidFile(
'Duplicated vulnerabilityFunctionID: %s: %s, line %d' %
(taxonomy, fname, vfun.lineno))
taxonomies.add(taxonomy)
with context(fname, vfun):
loss_ratios = ~vfun.lossRatio
coefficients = ~vfun.coefficientsVariation
if len(loss_ratios) != len(imls):
raise InvalidFile(
'There are %d loss ratios, but %d imls: %s, line %d' %
(len(loss_ratios), len(imls), fname,
vfun.lossRatio.lineno))
if len(coefficients) != len(imls):
raise InvalidFile(
'There are %d coefficients, but %d imls: %s, line %d' %
(len(coefficients), len(imls), fname,
vfun.coefficientsVariation.lineno))
with context(fname, vfun):
vf_dict[imt_str, taxonomy] = scientific.VulnerabilityFunction(
taxonomy, imt_str, imls, loss_ratios, coefficients,
vfun['probabilisticDistribution'])
categories['id'] = '_'.join(sorted(categories['vulnerabilitySetID']))
del categories['vulnerabilitySetID']
return vf_dict, categories | python | def get_vulnerability_functions_04(fname):
"""
Parse the vulnerability model in NRML 0.4 format.
:param fname:
path of the vulnerability file
:returns:
a dictionary imt, taxonomy -> vulnerability function + vset
"""
categories = dict(assetCategory=set(), lossCategory=set(),
vulnerabilitySetID=set())
imts = set()
taxonomies = set()
vf_dict = {} # imt, taxonomy -> vulnerability function
for vset in nrml.read(fname).vulnerabilityModel:
categories['assetCategory'].add(vset['assetCategory'])
categories['lossCategory'].add(vset['lossCategory'])
categories['vulnerabilitySetID'].add(vset['vulnerabilitySetID'])
IML = vset.IML
imt_str = IML['IMT']
imls = ~IML
imts.add(imt_str)
for vfun in vset.getnodes('discreteVulnerability'):
taxonomy = vfun['vulnerabilityFunctionID']
if taxonomy in taxonomies:
raise InvalidFile(
'Duplicated vulnerabilityFunctionID: %s: %s, line %d' %
(taxonomy, fname, vfun.lineno))
taxonomies.add(taxonomy)
with context(fname, vfun):
loss_ratios = ~vfun.lossRatio
coefficients = ~vfun.coefficientsVariation
if len(loss_ratios) != len(imls):
raise InvalidFile(
'There are %d loss ratios, but %d imls: %s, line %d' %
(len(loss_ratios), len(imls), fname,
vfun.lossRatio.lineno))
if len(coefficients) != len(imls):
raise InvalidFile(
'There are %d coefficients, but %d imls: %s, line %d' %
(len(coefficients), len(imls), fname,
vfun.coefficientsVariation.lineno))
with context(fname, vfun):
vf_dict[imt_str, taxonomy] = scientific.VulnerabilityFunction(
taxonomy, imt_str, imls, loss_ratios, coefficients,
vfun['probabilisticDistribution'])
categories['id'] = '_'.join(sorted(categories['vulnerabilitySetID']))
del categories['vulnerabilitySetID']
return vf_dict, categories | [
"def",
"get_vulnerability_functions_04",
"(",
"fname",
")",
":",
"categories",
"=",
"dict",
"(",
"assetCategory",
"=",
"set",
"(",
")",
",",
"lossCategory",
"=",
"set",
"(",
")",
",",
"vulnerabilitySetID",
"=",
"set",
"(",
")",
")",
"imts",
"=",
"set",
"(",
")",
"taxonomies",
"=",
"set",
"(",
")",
"vf_dict",
"=",
"{",
"}",
"# imt, taxonomy -> vulnerability function",
"for",
"vset",
"in",
"nrml",
".",
"read",
"(",
"fname",
")",
".",
"vulnerabilityModel",
":",
"categories",
"[",
"'assetCategory'",
"]",
".",
"add",
"(",
"vset",
"[",
"'assetCategory'",
"]",
")",
"categories",
"[",
"'lossCategory'",
"]",
".",
"add",
"(",
"vset",
"[",
"'lossCategory'",
"]",
")",
"categories",
"[",
"'vulnerabilitySetID'",
"]",
".",
"add",
"(",
"vset",
"[",
"'vulnerabilitySetID'",
"]",
")",
"IML",
"=",
"vset",
".",
"IML",
"imt_str",
"=",
"IML",
"[",
"'IMT'",
"]",
"imls",
"=",
"~",
"IML",
"imts",
".",
"add",
"(",
"imt_str",
")",
"for",
"vfun",
"in",
"vset",
".",
"getnodes",
"(",
"'discreteVulnerability'",
")",
":",
"taxonomy",
"=",
"vfun",
"[",
"'vulnerabilityFunctionID'",
"]",
"if",
"taxonomy",
"in",
"taxonomies",
":",
"raise",
"InvalidFile",
"(",
"'Duplicated vulnerabilityFunctionID: %s: %s, line %d'",
"%",
"(",
"taxonomy",
",",
"fname",
",",
"vfun",
".",
"lineno",
")",
")",
"taxonomies",
".",
"add",
"(",
"taxonomy",
")",
"with",
"context",
"(",
"fname",
",",
"vfun",
")",
":",
"loss_ratios",
"=",
"~",
"vfun",
".",
"lossRatio",
"coefficients",
"=",
"~",
"vfun",
".",
"coefficientsVariation",
"if",
"len",
"(",
"loss_ratios",
")",
"!=",
"len",
"(",
"imls",
")",
":",
"raise",
"InvalidFile",
"(",
"'There are %d loss ratios, but %d imls: %s, line %d'",
"%",
"(",
"len",
"(",
"loss_ratios",
")",
",",
"len",
"(",
"imls",
")",
",",
"fname",
",",
"vfun",
".",
"lossRatio",
".",
"lineno",
")",
")",
"if",
"len",
"(",
"coefficients",
")",
"!=",
"len",
"(",
"imls",
")",
":",
"raise",
"InvalidFile",
"(",
"'There are %d coefficients, but %d imls: %s, line %d'",
"%",
"(",
"len",
"(",
"coefficients",
")",
",",
"len",
"(",
"imls",
")",
",",
"fname",
",",
"vfun",
".",
"coefficientsVariation",
".",
"lineno",
")",
")",
"with",
"context",
"(",
"fname",
",",
"vfun",
")",
":",
"vf_dict",
"[",
"imt_str",
",",
"taxonomy",
"]",
"=",
"scientific",
".",
"VulnerabilityFunction",
"(",
"taxonomy",
",",
"imt_str",
",",
"imls",
",",
"loss_ratios",
",",
"coefficients",
",",
"vfun",
"[",
"'probabilisticDistribution'",
"]",
")",
"categories",
"[",
"'id'",
"]",
"=",
"'_'",
".",
"join",
"(",
"sorted",
"(",
"categories",
"[",
"'vulnerabilitySetID'",
"]",
")",
")",
"del",
"categories",
"[",
"'vulnerabilitySetID'",
"]",
"return",
"vf_dict",
",",
"categories"
] | Parse the vulnerability model in NRML 0.4 format.
:param fname:
path of the vulnerability file
:returns:
a dictionary imt, taxonomy -> vulnerability function + vset | [
"Parse",
"the",
"vulnerability",
"model",
"in",
"NRML",
"0",
".",
"4",
"format",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commands/upgrade_nrml.py#L32-L80 | train | 233,042 |
gem/oq-engine | openquake/commands/upgrade_nrml.py | upgrade_file | def upgrade_file(path, multipoint):
"""Upgrade to the latest NRML version"""
node0 = nrml.read(path, chatty=False)[0]
shutil.copy(path, path + '.bak') # make a backup of the original file
tag = striptag(node0.tag)
gml = True
if tag == 'vulnerabilityModel':
vf_dict, cat_dict = get_vulnerability_functions_04(path)
# below I am converting into a NRML 0.5 vulnerabilityModel
node0 = Node(
'vulnerabilityModel', cat_dict,
nodes=[obj_to_node(val) for val in vf_dict.values()])
gml = False
elif tag == 'fragilityModel':
node0 = read_nrml.convert_fragility_model_04(
nrml.read(path)[0], path)
gml = False
elif tag == 'sourceModel':
node0 = nrml.read(path)[0]
dic = groupby(node0.nodes, operator.itemgetter('tectonicRegion'))
node0.nodes = [Node('sourceGroup',
dict(tectonicRegion=trt, name="group %s" % i),
nodes=srcs)
for i, (trt, srcs) in enumerate(dic.items(), 1)]
if multipoint:
sourceconverter.update_source_model(node0, path + '.bak')
with open(path, 'wb') as f:
nrml.write([node0], f, gml=gml) | python | def upgrade_file(path, multipoint):
"""Upgrade to the latest NRML version"""
node0 = nrml.read(path, chatty=False)[0]
shutil.copy(path, path + '.bak') # make a backup of the original file
tag = striptag(node0.tag)
gml = True
if tag == 'vulnerabilityModel':
vf_dict, cat_dict = get_vulnerability_functions_04(path)
# below I am converting into a NRML 0.5 vulnerabilityModel
node0 = Node(
'vulnerabilityModel', cat_dict,
nodes=[obj_to_node(val) for val in vf_dict.values()])
gml = False
elif tag == 'fragilityModel':
node0 = read_nrml.convert_fragility_model_04(
nrml.read(path)[0], path)
gml = False
elif tag == 'sourceModel':
node0 = nrml.read(path)[0]
dic = groupby(node0.nodes, operator.itemgetter('tectonicRegion'))
node0.nodes = [Node('sourceGroup',
dict(tectonicRegion=trt, name="group %s" % i),
nodes=srcs)
for i, (trt, srcs) in enumerate(dic.items(), 1)]
if multipoint:
sourceconverter.update_source_model(node0, path + '.bak')
with open(path, 'wb') as f:
nrml.write([node0], f, gml=gml) | [
"def",
"upgrade_file",
"(",
"path",
",",
"multipoint",
")",
":",
"node0",
"=",
"nrml",
".",
"read",
"(",
"path",
",",
"chatty",
"=",
"False",
")",
"[",
"0",
"]",
"shutil",
".",
"copy",
"(",
"path",
",",
"path",
"+",
"'.bak'",
")",
"# make a backup of the original file",
"tag",
"=",
"striptag",
"(",
"node0",
".",
"tag",
")",
"gml",
"=",
"True",
"if",
"tag",
"==",
"'vulnerabilityModel'",
":",
"vf_dict",
",",
"cat_dict",
"=",
"get_vulnerability_functions_04",
"(",
"path",
")",
"# below I am converting into a NRML 0.5 vulnerabilityModel",
"node0",
"=",
"Node",
"(",
"'vulnerabilityModel'",
",",
"cat_dict",
",",
"nodes",
"=",
"[",
"obj_to_node",
"(",
"val",
")",
"for",
"val",
"in",
"vf_dict",
".",
"values",
"(",
")",
"]",
")",
"gml",
"=",
"False",
"elif",
"tag",
"==",
"'fragilityModel'",
":",
"node0",
"=",
"read_nrml",
".",
"convert_fragility_model_04",
"(",
"nrml",
".",
"read",
"(",
"path",
")",
"[",
"0",
"]",
",",
"path",
")",
"gml",
"=",
"False",
"elif",
"tag",
"==",
"'sourceModel'",
":",
"node0",
"=",
"nrml",
".",
"read",
"(",
"path",
")",
"[",
"0",
"]",
"dic",
"=",
"groupby",
"(",
"node0",
".",
"nodes",
",",
"operator",
".",
"itemgetter",
"(",
"'tectonicRegion'",
")",
")",
"node0",
".",
"nodes",
"=",
"[",
"Node",
"(",
"'sourceGroup'",
",",
"dict",
"(",
"tectonicRegion",
"=",
"trt",
",",
"name",
"=",
"\"group %s\"",
"%",
"i",
")",
",",
"nodes",
"=",
"srcs",
")",
"for",
"i",
",",
"(",
"trt",
",",
"srcs",
")",
"in",
"enumerate",
"(",
"dic",
".",
"items",
"(",
")",
",",
"1",
")",
"]",
"if",
"multipoint",
":",
"sourceconverter",
".",
"update_source_model",
"(",
"node0",
",",
"path",
"+",
"'.bak'",
")",
"with",
"open",
"(",
"path",
",",
"'wb'",
")",
"as",
"f",
":",
"nrml",
".",
"write",
"(",
"[",
"node0",
"]",
",",
"f",
",",
"gml",
"=",
"gml",
")"
] | Upgrade to the latest NRML version | [
"Upgrade",
"to",
"the",
"latest",
"NRML",
"version"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commands/upgrade_nrml.py#L83-L110 | train | 233,043 |
gem/oq-engine | openquake/hazardlib/gsim/faccioli_2010.py | FaccioliEtAl2010._compute_term_3 | def _compute_term_3(self, C, rrup, mag):
"""
This computes the third term in equation 2, page 2.
"""
return (C['a3'] *
np.log10(rrup + C['a4'] * np.power(10, C['a5'] * mag))) | python | def _compute_term_3(self, C, rrup, mag):
"""
This computes the third term in equation 2, page 2.
"""
return (C['a3'] *
np.log10(rrup + C['a4'] * np.power(10, C['a5'] * mag))) | [
"def",
"_compute_term_3",
"(",
"self",
",",
"C",
",",
"rrup",
",",
"mag",
")",
":",
"return",
"(",
"C",
"[",
"'a3'",
"]",
"*",
"np",
".",
"log10",
"(",
"rrup",
"+",
"C",
"[",
"'a4'",
"]",
"*",
"np",
".",
"power",
"(",
"10",
",",
"C",
"[",
"'a5'",
"]",
"*",
"mag",
")",
")",
")"
] | This computes the third term in equation 2, page 2. | [
"This",
"computes",
"the",
"third",
"term",
"in",
"equation",
"2",
"page",
"2",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/faccioli_2010.py#L85-L90 | train | 233,044 |
gem/oq-engine | openquake/hmtk/sources/source_conversion_utils.py | mag_scale_rel_to_hazardlib | def mag_scale_rel_to_hazardlib(mag_scale_rel, use_default=False):
"""
Returns the magnitude scaling relation in a format readable by
openquake.hazardlib
"""
if isinstance(mag_scale_rel, BaseMSR):
return mag_scale_rel
elif isinstance(mag_scale_rel, str):
if not mag_scale_rel in SCALE_RELS.keys():
raise ValueError('Magnitude scaling relation %s not supported!'
% mag_scale_rel)
else:
return SCALE_RELS[mag_scale_rel]()
else:
if use_default:
# Returns the Wells and Coppersmith string
return WC1994()
else:
raise ValueError('Magnitude Scaling Relation Not Defined!') | python | def mag_scale_rel_to_hazardlib(mag_scale_rel, use_default=False):
"""
Returns the magnitude scaling relation in a format readable by
openquake.hazardlib
"""
if isinstance(mag_scale_rel, BaseMSR):
return mag_scale_rel
elif isinstance(mag_scale_rel, str):
if not mag_scale_rel in SCALE_RELS.keys():
raise ValueError('Magnitude scaling relation %s not supported!'
% mag_scale_rel)
else:
return SCALE_RELS[mag_scale_rel]()
else:
if use_default:
# Returns the Wells and Coppersmith string
return WC1994()
else:
raise ValueError('Magnitude Scaling Relation Not Defined!') | [
"def",
"mag_scale_rel_to_hazardlib",
"(",
"mag_scale_rel",
",",
"use_default",
"=",
"False",
")",
":",
"if",
"isinstance",
"(",
"mag_scale_rel",
",",
"BaseMSR",
")",
":",
"return",
"mag_scale_rel",
"elif",
"isinstance",
"(",
"mag_scale_rel",
",",
"str",
")",
":",
"if",
"not",
"mag_scale_rel",
"in",
"SCALE_RELS",
".",
"keys",
"(",
")",
":",
"raise",
"ValueError",
"(",
"'Magnitude scaling relation %s not supported!'",
"%",
"mag_scale_rel",
")",
"else",
":",
"return",
"SCALE_RELS",
"[",
"mag_scale_rel",
"]",
"(",
")",
"else",
":",
"if",
"use_default",
":",
"# Returns the Wells and Coppersmith string",
"return",
"WC1994",
"(",
")",
"else",
":",
"raise",
"ValueError",
"(",
"'Magnitude Scaling Relation Not Defined!'",
")"
] | Returns the magnitude scaling relation in a format readable by
openquake.hazardlib | [
"Returns",
"the",
"magnitude",
"scaling",
"relation",
"in",
"a",
"format",
"readable",
"by",
"openquake",
".",
"hazardlib"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/sources/source_conversion_utils.py#L79-L97 | train | 233,045 |
gem/oq-engine | openquake/hmtk/sources/source_conversion_utils.py | npd_to_pmf | def npd_to_pmf(nodal_plane_dist, use_default=False):
"""
Returns the nodal plane distribution as an instance of the PMF class
"""
if isinstance(nodal_plane_dist, PMF):
# Aready in PMF format - return
return nodal_plane_dist
else:
if use_default:
return PMF([(1.0, NodalPlane(0.0, 90.0, 0.0))])
else:
raise ValueError('Nodal Plane distribution not defined') | python | def npd_to_pmf(nodal_plane_dist, use_default=False):
"""
Returns the nodal plane distribution as an instance of the PMF class
"""
if isinstance(nodal_plane_dist, PMF):
# Aready in PMF format - return
return nodal_plane_dist
else:
if use_default:
return PMF([(1.0, NodalPlane(0.0, 90.0, 0.0))])
else:
raise ValueError('Nodal Plane distribution not defined') | [
"def",
"npd_to_pmf",
"(",
"nodal_plane_dist",
",",
"use_default",
"=",
"False",
")",
":",
"if",
"isinstance",
"(",
"nodal_plane_dist",
",",
"PMF",
")",
":",
"# Aready in PMF format - return",
"return",
"nodal_plane_dist",
"else",
":",
"if",
"use_default",
":",
"return",
"PMF",
"(",
"[",
"(",
"1.0",
",",
"NodalPlane",
"(",
"0.0",
",",
"90.0",
",",
"0.0",
")",
")",
"]",
")",
"else",
":",
"raise",
"ValueError",
"(",
"'Nodal Plane distribution not defined'",
")"
] | Returns the nodal plane distribution as an instance of the PMF class | [
"Returns",
"the",
"nodal",
"plane",
"distribution",
"as",
"an",
"instance",
"of",
"the",
"PMF",
"class"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/sources/source_conversion_utils.py#L100-L111 | train | 233,046 |
gem/oq-engine | openquake/commands/engine.py | run_job | def run_job(job_ini, log_level='info', log_file=None, exports='',
username=getpass.getuser(), **kw):
"""
Run a job using the specified config file and other options.
:param str job_ini:
Path to calculation config (INI-style) files.
:param str log_level:
'debug', 'info', 'warn', 'error', or 'critical'
:param str log_file:
Path to log file.
:param exports:
A comma-separated string of export types requested by the user.
:param username:
Name of the user running the job
:param kw:
Extra parameters like hazard_calculation_id and calculation_mode
"""
job_id = logs.init('job', getattr(logging, log_level.upper()))
with logs.handle(job_id, log_level, log_file):
job_ini = os.path.abspath(job_ini)
oqparam = eng.job_from_file(job_ini, job_id, username, **kw)
kw['username'] = username
eng.run_calc(job_id, oqparam, exports, **kw)
for line in logs.dbcmd('list_outputs', job_id, False):
safeprint(line)
return job_id | python | def run_job(job_ini, log_level='info', log_file=None, exports='',
username=getpass.getuser(), **kw):
"""
Run a job using the specified config file and other options.
:param str job_ini:
Path to calculation config (INI-style) files.
:param str log_level:
'debug', 'info', 'warn', 'error', or 'critical'
:param str log_file:
Path to log file.
:param exports:
A comma-separated string of export types requested by the user.
:param username:
Name of the user running the job
:param kw:
Extra parameters like hazard_calculation_id and calculation_mode
"""
job_id = logs.init('job', getattr(logging, log_level.upper()))
with logs.handle(job_id, log_level, log_file):
job_ini = os.path.abspath(job_ini)
oqparam = eng.job_from_file(job_ini, job_id, username, **kw)
kw['username'] = username
eng.run_calc(job_id, oqparam, exports, **kw)
for line in logs.dbcmd('list_outputs', job_id, False):
safeprint(line)
return job_id | [
"def",
"run_job",
"(",
"job_ini",
",",
"log_level",
"=",
"'info'",
",",
"log_file",
"=",
"None",
",",
"exports",
"=",
"''",
",",
"username",
"=",
"getpass",
".",
"getuser",
"(",
")",
",",
"*",
"*",
"kw",
")",
":",
"job_id",
"=",
"logs",
".",
"init",
"(",
"'job'",
",",
"getattr",
"(",
"logging",
",",
"log_level",
".",
"upper",
"(",
")",
")",
")",
"with",
"logs",
".",
"handle",
"(",
"job_id",
",",
"log_level",
",",
"log_file",
")",
":",
"job_ini",
"=",
"os",
".",
"path",
".",
"abspath",
"(",
"job_ini",
")",
"oqparam",
"=",
"eng",
".",
"job_from_file",
"(",
"job_ini",
",",
"job_id",
",",
"username",
",",
"*",
"*",
"kw",
")",
"kw",
"[",
"'username'",
"]",
"=",
"username",
"eng",
".",
"run_calc",
"(",
"job_id",
",",
"oqparam",
",",
"exports",
",",
"*",
"*",
"kw",
")",
"for",
"line",
"in",
"logs",
".",
"dbcmd",
"(",
"'list_outputs'",
",",
"job_id",
",",
"False",
")",
":",
"safeprint",
"(",
"line",
")",
"return",
"job_id"
] | Run a job using the specified config file and other options.
:param str job_ini:
Path to calculation config (INI-style) files.
:param str log_level:
'debug', 'info', 'warn', 'error', or 'critical'
:param str log_file:
Path to log file.
:param exports:
A comma-separated string of export types requested by the user.
:param username:
Name of the user running the job
:param kw:
Extra parameters like hazard_calculation_id and calculation_mode | [
"Run",
"a",
"job",
"using",
"the",
"specified",
"config",
"file",
"and",
"other",
"options",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commands/engine.py#L45-L71 | train | 233,047 |
gem/oq-engine | openquake/commands/engine.py | run_tile | def run_tile(job_ini, sites_slice):
"""
Used in tiling calculations
"""
return run_job(job_ini, sites_slice=(sites_slice.start, sites_slice.stop)) | python | def run_tile(job_ini, sites_slice):
"""
Used in tiling calculations
"""
return run_job(job_ini, sites_slice=(sites_slice.start, sites_slice.stop)) | [
"def",
"run_tile",
"(",
"job_ini",
",",
"sites_slice",
")",
":",
"return",
"run_job",
"(",
"job_ini",
",",
"sites_slice",
"=",
"(",
"sites_slice",
".",
"start",
",",
"sites_slice",
".",
"stop",
")",
")"
] | Used in tiling calculations | [
"Used",
"in",
"tiling",
"calculations"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commands/engine.py#L74-L78 | train | 233,048 |
gem/oq-engine | openquake/commands/engine.py | del_calculation | def del_calculation(job_id, confirmed=False):
"""
Delete a calculation and all associated outputs.
"""
if logs.dbcmd('get_job', job_id) is None:
print('There is no job %d' % job_id)
return
if confirmed or confirm(
'Are you sure you want to (abort and) delete this calculation and '
'all associated outputs?\nThis action cannot be undone. (y/n): '):
try:
abort(job_id)
resp = logs.dbcmd('del_calc', job_id, getpass.getuser())
except RuntimeError as err:
safeprint(err)
else:
if 'success' in resp:
print('Removed %d' % job_id)
else:
print(resp['error']) | python | def del_calculation(job_id, confirmed=False):
"""
Delete a calculation and all associated outputs.
"""
if logs.dbcmd('get_job', job_id) is None:
print('There is no job %d' % job_id)
return
if confirmed or confirm(
'Are you sure you want to (abort and) delete this calculation and '
'all associated outputs?\nThis action cannot be undone. (y/n): '):
try:
abort(job_id)
resp = logs.dbcmd('del_calc', job_id, getpass.getuser())
except RuntimeError as err:
safeprint(err)
else:
if 'success' in resp:
print('Removed %d' % job_id)
else:
print(resp['error']) | [
"def",
"del_calculation",
"(",
"job_id",
",",
"confirmed",
"=",
"False",
")",
":",
"if",
"logs",
".",
"dbcmd",
"(",
"'get_job'",
",",
"job_id",
")",
"is",
"None",
":",
"print",
"(",
"'There is no job %d'",
"%",
"job_id",
")",
"return",
"if",
"confirmed",
"or",
"confirm",
"(",
"'Are you sure you want to (abort and) delete this calculation and '",
"'all associated outputs?\\nThis action cannot be undone. (y/n): '",
")",
":",
"try",
":",
"abort",
"(",
"job_id",
")",
"resp",
"=",
"logs",
".",
"dbcmd",
"(",
"'del_calc'",
",",
"job_id",
",",
"getpass",
".",
"getuser",
"(",
")",
")",
"except",
"RuntimeError",
"as",
"err",
":",
"safeprint",
"(",
"err",
")",
"else",
":",
"if",
"'success'",
"in",
"resp",
":",
"print",
"(",
"'Removed %d'",
"%",
"job_id",
")",
"else",
":",
"print",
"(",
"resp",
"[",
"'error'",
"]",
")"
] | Delete a calculation and all associated outputs. | [
"Delete",
"a",
"calculation",
"and",
"all",
"associated",
"outputs",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commands/engine.py#L81-L101 | train | 233,049 |
gem/oq-engine | openquake/commands/engine.py | smart_run | def smart_run(job_ini, oqparam, log_level, log_file, exports, reuse_hazard):
"""
Run calculations by storing their hazard checksum and reusing previous
calculations if requested.
"""
haz_checksum = readinput.get_checksum32(oqparam, hazard=True)
# retrieve an old calculation with the right checksum, if any
job = logs.dbcmd('get_job_from_checksum', haz_checksum)
reuse = reuse_hazard and job and os.path.exists(job.ds_calc_dir + '.hdf5')
# recompute the hazard and store the checksum
ebr = (oqparam.calculation_mode == 'event_based_risk' and
'gmfs' not in oqparam.inputs)
if ebr:
kw = dict(calculation_mode='event_based')
if (oqparam.sites or 'sites' in oqparam.inputs or
'site_model' in oqparam.inputs):
# remove exposure from the hazard
kw['exposure_file'] = ''
else:
kw = {}
if not reuse:
hc_id = run_job(job_ini, log_level, log_file, exports, **kw)
if job is None:
logs.dbcmd('add_checksum', hc_id, haz_checksum)
elif not reuse_hazard or not os.path.exists(job.ds_calc_dir + '.hdf5'):
logs.dbcmd('update_job_checksum', hc_id, haz_checksum)
if ebr:
run_job(job_ini, log_level, log_file,
exports, hazard_calculation_id=hc_id)
else:
hc_id = job.id
logging.info('Reusing job #%d', job.id)
run_job(job_ini, log_level, log_file,
exports, hazard_calculation_id=hc_id) | python | def smart_run(job_ini, oqparam, log_level, log_file, exports, reuse_hazard):
"""
Run calculations by storing their hazard checksum and reusing previous
calculations if requested.
"""
haz_checksum = readinput.get_checksum32(oqparam, hazard=True)
# retrieve an old calculation with the right checksum, if any
job = logs.dbcmd('get_job_from_checksum', haz_checksum)
reuse = reuse_hazard and job and os.path.exists(job.ds_calc_dir + '.hdf5')
# recompute the hazard and store the checksum
ebr = (oqparam.calculation_mode == 'event_based_risk' and
'gmfs' not in oqparam.inputs)
if ebr:
kw = dict(calculation_mode='event_based')
if (oqparam.sites or 'sites' in oqparam.inputs or
'site_model' in oqparam.inputs):
# remove exposure from the hazard
kw['exposure_file'] = ''
else:
kw = {}
if not reuse:
hc_id = run_job(job_ini, log_level, log_file, exports, **kw)
if job is None:
logs.dbcmd('add_checksum', hc_id, haz_checksum)
elif not reuse_hazard or not os.path.exists(job.ds_calc_dir + '.hdf5'):
logs.dbcmd('update_job_checksum', hc_id, haz_checksum)
if ebr:
run_job(job_ini, log_level, log_file,
exports, hazard_calculation_id=hc_id)
else:
hc_id = job.id
logging.info('Reusing job #%d', job.id)
run_job(job_ini, log_level, log_file,
exports, hazard_calculation_id=hc_id) | [
"def",
"smart_run",
"(",
"job_ini",
",",
"oqparam",
",",
"log_level",
",",
"log_file",
",",
"exports",
",",
"reuse_hazard",
")",
":",
"haz_checksum",
"=",
"readinput",
".",
"get_checksum32",
"(",
"oqparam",
",",
"hazard",
"=",
"True",
")",
"# retrieve an old calculation with the right checksum, if any",
"job",
"=",
"logs",
".",
"dbcmd",
"(",
"'get_job_from_checksum'",
",",
"haz_checksum",
")",
"reuse",
"=",
"reuse_hazard",
"and",
"job",
"and",
"os",
".",
"path",
".",
"exists",
"(",
"job",
".",
"ds_calc_dir",
"+",
"'.hdf5'",
")",
"# recompute the hazard and store the checksum",
"ebr",
"=",
"(",
"oqparam",
".",
"calculation_mode",
"==",
"'event_based_risk'",
"and",
"'gmfs'",
"not",
"in",
"oqparam",
".",
"inputs",
")",
"if",
"ebr",
":",
"kw",
"=",
"dict",
"(",
"calculation_mode",
"=",
"'event_based'",
")",
"if",
"(",
"oqparam",
".",
"sites",
"or",
"'sites'",
"in",
"oqparam",
".",
"inputs",
"or",
"'site_model'",
"in",
"oqparam",
".",
"inputs",
")",
":",
"# remove exposure from the hazard",
"kw",
"[",
"'exposure_file'",
"]",
"=",
"''",
"else",
":",
"kw",
"=",
"{",
"}",
"if",
"not",
"reuse",
":",
"hc_id",
"=",
"run_job",
"(",
"job_ini",
",",
"log_level",
",",
"log_file",
",",
"exports",
",",
"*",
"*",
"kw",
")",
"if",
"job",
"is",
"None",
":",
"logs",
".",
"dbcmd",
"(",
"'add_checksum'",
",",
"hc_id",
",",
"haz_checksum",
")",
"elif",
"not",
"reuse_hazard",
"or",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"job",
".",
"ds_calc_dir",
"+",
"'.hdf5'",
")",
":",
"logs",
".",
"dbcmd",
"(",
"'update_job_checksum'",
",",
"hc_id",
",",
"haz_checksum",
")",
"if",
"ebr",
":",
"run_job",
"(",
"job_ini",
",",
"log_level",
",",
"log_file",
",",
"exports",
",",
"hazard_calculation_id",
"=",
"hc_id",
")",
"else",
":",
"hc_id",
"=",
"job",
".",
"id",
"logging",
".",
"info",
"(",
"'Reusing job #%d'",
",",
"job",
".",
"id",
")",
"run_job",
"(",
"job_ini",
",",
"log_level",
",",
"log_file",
",",
"exports",
",",
"hazard_calculation_id",
"=",
"hc_id",
")"
] | Run calculations by storing their hazard checksum and reusing previous
calculations if requested. | [
"Run",
"calculations",
"by",
"storing",
"their",
"hazard",
"checksum",
"and",
"reusing",
"previous",
"calculations",
"if",
"requested",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commands/engine.py#L104-L137 | train | 233,050 |
gem/oq-engine | openquake/hazardlib/gsim/campbell_bozorgnia_2008.py | CampbellBozorgnia2008._get_stddevs | def _get_stddevs(self, C, sites, pga1100, sigma_pga, stddev_types):
"""
Returns the standard deviations as described in the "ALEATORY
UNCERTAINTY MODEL" section of the paper. Equations 13 to 19, pages 147
to 151
"""
std_intra = self._compute_intra_event_std(C,
sites.vs30,
pga1100,
sigma_pga)
std_inter = C['t_lny'] * np.ones_like(sites.vs30)
stddevs = []
for stddev_type in stddev_types:
assert stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES
if stddev_type == const.StdDev.TOTAL:
stddevs.append(self._get_total_sigma(C, std_intra, std_inter))
elif stddev_type == const.StdDev.INTRA_EVENT:
stddevs.append(std_intra)
elif stddev_type == const.StdDev.INTER_EVENT:
stddevs.append(std_inter)
return stddevs | python | def _get_stddevs(self, C, sites, pga1100, sigma_pga, stddev_types):
"""
Returns the standard deviations as described in the "ALEATORY
UNCERTAINTY MODEL" section of the paper. Equations 13 to 19, pages 147
to 151
"""
std_intra = self._compute_intra_event_std(C,
sites.vs30,
pga1100,
sigma_pga)
std_inter = C['t_lny'] * np.ones_like(sites.vs30)
stddevs = []
for stddev_type in stddev_types:
assert stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES
if stddev_type == const.StdDev.TOTAL:
stddevs.append(self._get_total_sigma(C, std_intra, std_inter))
elif stddev_type == const.StdDev.INTRA_EVENT:
stddevs.append(std_intra)
elif stddev_type == const.StdDev.INTER_EVENT:
stddevs.append(std_inter)
return stddevs | [
"def",
"_get_stddevs",
"(",
"self",
",",
"C",
",",
"sites",
",",
"pga1100",
",",
"sigma_pga",
",",
"stddev_types",
")",
":",
"std_intra",
"=",
"self",
".",
"_compute_intra_event_std",
"(",
"C",
",",
"sites",
".",
"vs30",
",",
"pga1100",
",",
"sigma_pga",
")",
"std_inter",
"=",
"C",
"[",
"'t_lny'",
"]",
"*",
"np",
".",
"ones_like",
"(",
"sites",
".",
"vs30",
")",
"stddevs",
"=",
"[",
"]",
"for",
"stddev_type",
"in",
"stddev_types",
":",
"assert",
"stddev_type",
"in",
"self",
".",
"DEFINED_FOR_STANDARD_DEVIATION_TYPES",
"if",
"stddev_type",
"==",
"const",
".",
"StdDev",
".",
"TOTAL",
":",
"stddevs",
".",
"append",
"(",
"self",
".",
"_get_total_sigma",
"(",
"C",
",",
"std_intra",
",",
"std_inter",
")",
")",
"elif",
"stddev_type",
"==",
"const",
".",
"StdDev",
".",
"INTRA_EVENT",
":",
"stddevs",
".",
"append",
"(",
"std_intra",
")",
"elif",
"stddev_type",
"==",
"const",
".",
"StdDev",
".",
"INTER_EVENT",
":",
"stddevs",
".",
"append",
"(",
"std_inter",
")",
"return",
"stddevs"
] | Returns the standard deviations as described in the "ALEATORY
UNCERTAINTY MODEL" section of the paper. Equations 13 to 19, pages 147
to 151 | [
"Returns",
"the",
"standard",
"deviations",
"as",
"described",
"in",
"the",
"ALEATORY",
"UNCERTAINTY",
"MODEL",
"section",
"of",
"the",
"paper",
".",
"Equations",
"13",
"to",
"19",
"pages",
"147",
"to",
"151"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/campbell_bozorgnia_2008.py#L300-L321 | train | 233,051 |
gem/oq-engine | openquake/hazardlib/gsim/campbell_bozorgnia_2008.py | CampbellBozorgnia2008._compute_intra_event_std | def _compute_intra_event_std(self, C, vs30, pga1100, sigma_pga):
"""
Returns the intra-event standard deviation at the site, as defined in
equation 15, page 147
"""
# Get intra-event standard deviation at the base of the site profile
sig_lnyb = np.sqrt(C['s_lny'] ** 2. - C['s_lnAF'] ** 2.)
sig_lnab = np.sqrt(sigma_pga ** 2. - C['s_lnAF'] ** 2.)
# Get linearised relationship between f_site and ln PGA
alpha = self._compute_intra_event_alpha(C, vs30, pga1100)
return np.sqrt(
(sig_lnyb ** 2.) +
(C['s_lnAF'] ** 2.) +
((alpha ** 2.) * (sig_lnab ** 2.)) +
(2.0 * alpha * C['rho'] * sig_lnyb * sig_lnab)) | python | def _compute_intra_event_std(self, C, vs30, pga1100, sigma_pga):
"""
Returns the intra-event standard deviation at the site, as defined in
equation 15, page 147
"""
# Get intra-event standard deviation at the base of the site profile
sig_lnyb = np.sqrt(C['s_lny'] ** 2. - C['s_lnAF'] ** 2.)
sig_lnab = np.sqrt(sigma_pga ** 2. - C['s_lnAF'] ** 2.)
# Get linearised relationship between f_site and ln PGA
alpha = self._compute_intra_event_alpha(C, vs30, pga1100)
return np.sqrt(
(sig_lnyb ** 2.) +
(C['s_lnAF'] ** 2.) +
((alpha ** 2.) * (sig_lnab ** 2.)) +
(2.0 * alpha * C['rho'] * sig_lnyb * sig_lnab)) | [
"def",
"_compute_intra_event_std",
"(",
"self",
",",
"C",
",",
"vs30",
",",
"pga1100",
",",
"sigma_pga",
")",
":",
"# Get intra-event standard deviation at the base of the site profile",
"sig_lnyb",
"=",
"np",
".",
"sqrt",
"(",
"C",
"[",
"'s_lny'",
"]",
"**",
"2.",
"-",
"C",
"[",
"'s_lnAF'",
"]",
"**",
"2.",
")",
"sig_lnab",
"=",
"np",
".",
"sqrt",
"(",
"sigma_pga",
"**",
"2.",
"-",
"C",
"[",
"'s_lnAF'",
"]",
"**",
"2.",
")",
"# Get linearised relationship between f_site and ln PGA",
"alpha",
"=",
"self",
".",
"_compute_intra_event_alpha",
"(",
"C",
",",
"vs30",
",",
"pga1100",
")",
"return",
"np",
".",
"sqrt",
"(",
"(",
"sig_lnyb",
"**",
"2.",
")",
"+",
"(",
"C",
"[",
"'s_lnAF'",
"]",
"**",
"2.",
")",
"+",
"(",
"(",
"alpha",
"**",
"2.",
")",
"*",
"(",
"sig_lnab",
"**",
"2.",
")",
")",
"+",
"(",
"2.0",
"*",
"alpha",
"*",
"C",
"[",
"'rho'",
"]",
"*",
"sig_lnyb",
"*",
"sig_lnab",
")",
")"
] | Returns the intra-event standard deviation at the site, as defined in
equation 15, page 147 | [
"Returns",
"the",
"intra",
"-",
"event",
"standard",
"deviation",
"at",
"the",
"site",
"as",
"defined",
"in",
"equation",
"15",
"page",
"147"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/campbell_bozorgnia_2008.py#L323-L338 | train | 233,052 |
gem/oq-engine | openquake/hazardlib/gsim/campbell_bozorgnia_2008.py | CampbellBozorgnia2008._compute_intra_event_alpha | def _compute_intra_event_alpha(self, C, vs30, pga1100):
"""
Returns the linearised functional relationship between fsite and
pga1100, determined from the partial derivative defined on equation 17
on page 148
"""
alpha = np.zeros_like(vs30, dtype=float)
idx = vs30 < C['k1']
if np.any(idx):
temp1 = (pga1100[idx] +
C['c'] * (vs30[idx] / C['k1']) ** C['n']) ** -1.
temp1 = temp1 - ((pga1100[idx] + C['c']) ** -1.)
alpha[idx] = C['k2'] * pga1100[idx] * temp1
return alpha | python | def _compute_intra_event_alpha(self, C, vs30, pga1100):
"""
Returns the linearised functional relationship between fsite and
pga1100, determined from the partial derivative defined on equation 17
on page 148
"""
alpha = np.zeros_like(vs30, dtype=float)
idx = vs30 < C['k1']
if np.any(idx):
temp1 = (pga1100[idx] +
C['c'] * (vs30[idx] / C['k1']) ** C['n']) ** -1.
temp1 = temp1 - ((pga1100[idx] + C['c']) ** -1.)
alpha[idx] = C['k2'] * pga1100[idx] * temp1
return alpha | [
"def",
"_compute_intra_event_alpha",
"(",
"self",
",",
"C",
",",
"vs30",
",",
"pga1100",
")",
":",
"alpha",
"=",
"np",
".",
"zeros_like",
"(",
"vs30",
",",
"dtype",
"=",
"float",
")",
"idx",
"=",
"vs30",
"<",
"C",
"[",
"'k1'",
"]",
"if",
"np",
".",
"any",
"(",
"idx",
")",
":",
"temp1",
"=",
"(",
"pga1100",
"[",
"idx",
"]",
"+",
"C",
"[",
"'c'",
"]",
"*",
"(",
"vs30",
"[",
"idx",
"]",
"/",
"C",
"[",
"'k1'",
"]",
")",
"**",
"C",
"[",
"'n'",
"]",
")",
"**",
"-",
"1.",
"temp1",
"=",
"temp1",
"-",
"(",
"(",
"pga1100",
"[",
"idx",
"]",
"+",
"C",
"[",
"'c'",
"]",
")",
"**",
"-",
"1.",
")",
"alpha",
"[",
"idx",
"]",
"=",
"C",
"[",
"'k2'",
"]",
"*",
"pga1100",
"[",
"idx",
"]",
"*",
"temp1",
"return",
"alpha"
] | Returns the linearised functional relationship between fsite and
pga1100, determined from the partial derivative defined on equation 17
on page 148 | [
"Returns",
"the",
"linearised",
"functional",
"relationship",
"between",
"fsite",
"and",
"pga1100",
"determined",
"from",
"the",
"partial",
"derivative",
"defined",
"on",
"equation",
"17",
"on",
"page",
"148"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/campbell_bozorgnia_2008.py#L340-L354 | train | 233,053 |
gem/oq-engine | openquake/hazardlib/gsim/campbell_bozorgnia_2008.py | CampbellBozorgnia2008Arbitrary._get_total_sigma | def _get_total_sigma(self, C, std_intra, std_inter):
"""
Returns the total sigma term for the arbitrary horizontal component of
ground motion defined by equation 18, page 150
"""
return np.sqrt(std_intra ** 2. + std_inter ** 2. + C['c_lny'] ** 2.) | python | def _get_total_sigma(self, C, std_intra, std_inter):
"""
Returns the total sigma term for the arbitrary horizontal component of
ground motion defined by equation 18, page 150
"""
return np.sqrt(std_intra ** 2. + std_inter ** 2. + C['c_lny'] ** 2.) | [
"def",
"_get_total_sigma",
"(",
"self",
",",
"C",
",",
"std_intra",
",",
"std_inter",
")",
":",
"return",
"np",
".",
"sqrt",
"(",
"std_intra",
"**",
"2.",
"+",
"std_inter",
"**",
"2.",
"+",
"C",
"[",
"'c_lny'",
"]",
"**",
"2.",
")"
] | Returns the total sigma term for the arbitrary horizontal component of
ground motion defined by equation 18, page 150 | [
"Returns",
"the",
"total",
"sigma",
"term",
"for",
"the",
"arbitrary",
"horizontal",
"component",
"of",
"ground",
"motion",
"defined",
"by",
"equation",
"18",
"page",
"150"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/campbell_bozorgnia_2008.py#L407-L412 | train | 233,054 |
gem/oq-engine | openquake/calculators/ucerf_event_based.py | generate_event_set | def generate_event_set(ucerf, background_sids, src_filter, ses_idx, seed):
"""
Generates the event set corresponding to a particular branch
"""
serial = seed + ses_idx * TWO16
# get rates from file
with h5py.File(ucerf.source_file, 'r') as hdf5:
occurrences = ucerf.tom.sample_number_of_occurrences(ucerf.rate, seed)
indices, = numpy.where(occurrences)
logging.debug(
'Considering "%s", %d ruptures', ucerf.source_id, len(indices))
# get ruptures from the indices
ruptures = []
rupture_occ = []
for iloc, n_occ in zip(indices, occurrences[indices]):
ucerf_rup = ucerf.get_ucerf_rupture(iloc, src_filter)
if ucerf_rup:
ucerf_rup.serial = serial
serial += 1
ruptures.append(ucerf_rup)
rupture_occ.append(n_occ)
# sample background sources
background_ruptures, background_n_occ = sample_background_model(
hdf5, ucerf.idx_set["grid_key"], ucerf.tom, seed,
background_sids, ucerf.min_mag, ucerf.npd, ucerf.hdd, ucerf.usd,
ucerf.lsd, ucerf.msr, ucerf.aspect, ucerf.tectonic_region_type)
for i, brup in enumerate(background_ruptures):
brup.serial = serial
serial += 1
ruptures.append(brup)
rupture_occ.extend(background_n_occ)
assert len(ruptures) < TWO16, len(ruptures) # < 2^16 ruptures per SES
return ruptures, rupture_occ | python | def generate_event_set(ucerf, background_sids, src_filter, ses_idx, seed):
"""
Generates the event set corresponding to a particular branch
"""
serial = seed + ses_idx * TWO16
# get rates from file
with h5py.File(ucerf.source_file, 'r') as hdf5:
occurrences = ucerf.tom.sample_number_of_occurrences(ucerf.rate, seed)
indices, = numpy.where(occurrences)
logging.debug(
'Considering "%s", %d ruptures', ucerf.source_id, len(indices))
# get ruptures from the indices
ruptures = []
rupture_occ = []
for iloc, n_occ in zip(indices, occurrences[indices]):
ucerf_rup = ucerf.get_ucerf_rupture(iloc, src_filter)
if ucerf_rup:
ucerf_rup.serial = serial
serial += 1
ruptures.append(ucerf_rup)
rupture_occ.append(n_occ)
# sample background sources
background_ruptures, background_n_occ = sample_background_model(
hdf5, ucerf.idx_set["grid_key"], ucerf.tom, seed,
background_sids, ucerf.min_mag, ucerf.npd, ucerf.hdd, ucerf.usd,
ucerf.lsd, ucerf.msr, ucerf.aspect, ucerf.tectonic_region_type)
for i, brup in enumerate(background_ruptures):
brup.serial = serial
serial += 1
ruptures.append(brup)
rupture_occ.extend(background_n_occ)
assert len(ruptures) < TWO16, len(ruptures) # < 2^16 ruptures per SES
return ruptures, rupture_occ | [
"def",
"generate_event_set",
"(",
"ucerf",
",",
"background_sids",
",",
"src_filter",
",",
"ses_idx",
",",
"seed",
")",
":",
"serial",
"=",
"seed",
"+",
"ses_idx",
"*",
"TWO16",
"# get rates from file",
"with",
"h5py",
".",
"File",
"(",
"ucerf",
".",
"source_file",
",",
"'r'",
")",
"as",
"hdf5",
":",
"occurrences",
"=",
"ucerf",
".",
"tom",
".",
"sample_number_of_occurrences",
"(",
"ucerf",
".",
"rate",
",",
"seed",
")",
"indices",
",",
"=",
"numpy",
".",
"where",
"(",
"occurrences",
")",
"logging",
".",
"debug",
"(",
"'Considering \"%s\", %d ruptures'",
",",
"ucerf",
".",
"source_id",
",",
"len",
"(",
"indices",
")",
")",
"# get ruptures from the indices",
"ruptures",
"=",
"[",
"]",
"rupture_occ",
"=",
"[",
"]",
"for",
"iloc",
",",
"n_occ",
"in",
"zip",
"(",
"indices",
",",
"occurrences",
"[",
"indices",
"]",
")",
":",
"ucerf_rup",
"=",
"ucerf",
".",
"get_ucerf_rupture",
"(",
"iloc",
",",
"src_filter",
")",
"if",
"ucerf_rup",
":",
"ucerf_rup",
".",
"serial",
"=",
"serial",
"serial",
"+=",
"1",
"ruptures",
".",
"append",
"(",
"ucerf_rup",
")",
"rupture_occ",
".",
"append",
"(",
"n_occ",
")",
"# sample background sources",
"background_ruptures",
",",
"background_n_occ",
"=",
"sample_background_model",
"(",
"hdf5",
",",
"ucerf",
".",
"idx_set",
"[",
"\"grid_key\"",
"]",
",",
"ucerf",
".",
"tom",
",",
"seed",
",",
"background_sids",
",",
"ucerf",
".",
"min_mag",
",",
"ucerf",
".",
"npd",
",",
"ucerf",
".",
"hdd",
",",
"ucerf",
".",
"usd",
",",
"ucerf",
".",
"lsd",
",",
"ucerf",
".",
"msr",
",",
"ucerf",
".",
"aspect",
",",
"ucerf",
".",
"tectonic_region_type",
")",
"for",
"i",
",",
"brup",
"in",
"enumerate",
"(",
"background_ruptures",
")",
":",
"brup",
".",
"serial",
"=",
"serial",
"serial",
"+=",
"1",
"ruptures",
".",
"append",
"(",
"brup",
")",
"rupture_occ",
".",
"extend",
"(",
"background_n_occ",
")",
"assert",
"len",
"(",
"ruptures",
")",
"<",
"TWO16",
",",
"len",
"(",
"ruptures",
")",
"# < 2^16 ruptures per SES",
"return",
"ruptures",
",",
"rupture_occ"
] | Generates the event set corresponding to a particular branch | [
"Generates",
"the",
"event",
"set",
"corresponding",
"to",
"a",
"particular",
"branch"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/ucerf_event_based.py#L41-L76 | train | 233,055 |
gem/oq-engine | openquake/calculators/ucerf_event_based.py | sample_background_model | def sample_background_model(
hdf5, branch_key, tom, seed, filter_idx, min_mag, npd, hdd,
upper_seismogenic_depth, lower_seismogenic_depth, msr=WC1994(),
aspect=1.5, trt=DEFAULT_TRT):
"""
Generates a rupture set from a sample of the background model
:param branch_key:
Key to indicate the branch for selecting the background model
:param tom:
Temporal occurrence model as instance of :class:
openquake.hazardlib.tom.TOM
:param seed:
Random seed to use in the call to tom.sample_number_of_occurrences
:param filter_idx:
Sites for consideration (can be None!)
:param float min_mag:
Minimim magnitude for consideration of background sources
:param npd:
Nodal plane distribution as instance of :class:
openquake.hazardlib.pmf.PMF
:param hdd:
Hypocentral depth distribution as instance of :class:
openquake.hazardlib.pmf.PMF
:param float aspect:
Aspect ratio
:param float upper_seismogenic_depth:
Upper seismogenic depth (km)
:param float lower_seismogenic_depth:
Lower seismogenic depth (km)
:param msr:
Magnitude scaling relation
:param float integration_distance:
Maximum distance from rupture to site for consideration
"""
bg_magnitudes = hdf5["/".join(["Grid", branch_key, "Magnitude"])].value
# Select magnitudes above the minimum magnitudes
mag_idx = bg_magnitudes >= min_mag
mags = bg_magnitudes[mag_idx]
rates = hdf5["/".join(["Grid", branch_key, "RateArray"])][filter_idx, :]
rates = rates[:, mag_idx]
valid_locs = hdf5["Grid/Locations"][filter_idx, :]
# Sample remaining rates
sampler = tom.sample_number_of_occurrences(rates, seed)
background_ruptures = []
background_n_occ = []
for i, mag in enumerate(mags):
rate_idx = numpy.where(sampler[:, i])[0]
rate_cnt = sampler[rate_idx, i]
occurrence = rates[rate_idx, i]
locations = valid_locs[rate_idx, :]
ruptures = generate_background_ruptures(
tom, locations, occurrence,
mag, npd, hdd, upper_seismogenic_depth,
lower_seismogenic_depth, msr, aspect, trt)
background_ruptures.extend(ruptures)
background_n_occ.extend(rate_cnt.tolist())
return background_ruptures, background_n_occ | python | def sample_background_model(
hdf5, branch_key, tom, seed, filter_idx, min_mag, npd, hdd,
upper_seismogenic_depth, lower_seismogenic_depth, msr=WC1994(),
aspect=1.5, trt=DEFAULT_TRT):
"""
Generates a rupture set from a sample of the background model
:param branch_key:
Key to indicate the branch for selecting the background model
:param tom:
Temporal occurrence model as instance of :class:
openquake.hazardlib.tom.TOM
:param seed:
Random seed to use in the call to tom.sample_number_of_occurrences
:param filter_idx:
Sites for consideration (can be None!)
:param float min_mag:
Minimim magnitude for consideration of background sources
:param npd:
Nodal plane distribution as instance of :class:
openquake.hazardlib.pmf.PMF
:param hdd:
Hypocentral depth distribution as instance of :class:
openquake.hazardlib.pmf.PMF
:param float aspect:
Aspect ratio
:param float upper_seismogenic_depth:
Upper seismogenic depth (km)
:param float lower_seismogenic_depth:
Lower seismogenic depth (km)
:param msr:
Magnitude scaling relation
:param float integration_distance:
Maximum distance from rupture to site for consideration
"""
bg_magnitudes = hdf5["/".join(["Grid", branch_key, "Magnitude"])].value
# Select magnitudes above the minimum magnitudes
mag_idx = bg_magnitudes >= min_mag
mags = bg_magnitudes[mag_idx]
rates = hdf5["/".join(["Grid", branch_key, "RateArray"])][filter_idx, :]
rates = rates[:, mag_idx]
valid_locs = hdf5["Grid/Locations"][filter_idx, :]
# Sample remaining rates
sampler = tom.sample_number_of_occurrences(rates, seed)
background_ruptures = []
background_n_occ = []
for i, mag in enumerate(mags):
rate_idx = numpy.where(sampler[:, i])[0]
rate_cnt = sampler[rate_idx, i]
occurrence = rates[rate_idx, i]
locations = valid_locs[rate_idx, :]
ruptures = generate_background_ruptures(
tom, locations, occurrence,
mag, npd, hdd, upper_seismogenic_depth,
lower_seismogenic_depth, msr, aspect, trt)
background_ruptures.extend(ruptures)
background_n_occ.extend(rate_cnt.tolist())
return background_ruptures, background_n_occ | [
"def",
"sample_background_model",
"(",
"hdf5",
",",
"branch_key",
",",
"tom",
",",
"seed",
",",
"filter_idx",
",",
"min_mag",
",",
"npd",
",",
"hdd",
",",
"upper_seismogenic_depth",
",",
"lower_seismogenic_depth",
",",
"msr",
"=",
"WC1994",
"(",
")",
",",
"aspect",
"=",
"1.5",
",",
"trt",
"=",
"DEFAULT_TRT",
")",
":",
"bg_magnitudes",
"=",
"hdf5",
"[",
"\"/\"",
".",
"join",
"(",
"[",
"\"Grid\"",
",",
"branch_key",
",",
"\"Magnitude\"",
"]",
")",
"]",
".",
"value",
"# Select magnitudes above the minimum magnitudes",
"mag_idx",
"=",
"bg_magnitudes",
">=",
"min_mag",
"mags",
"=",
"bg_magnitudes",
"[",
"mag_idx",
"]",
"rates",
"=",
"hdf5",
"[",
"\"/\"",
".",
"join",
"(",
"[",
"\"Grid\"",
",",
"branch_key",
",",
"\"RateArray\"",
"]",
")",
"]",
"[",
"filter_idx",
",",
":",
"]",
"rates",
"=",
"rates",
"[",
":",
",",
"mag_idx",
"]",
"valid_locs",
"=",
"hdf5",
"[",
"\"Grid/Locations\"",
"]",
"[",
"filter_idx",
",",
":",
"]",
"# Sample remaining rates",
"sampler",
"=",
"tom",
".",
"sample_number_of_occurrences",
"(",
"rates",
",",
"seed",
")",
"background_ruptures",
"=",
"[",
"]",
"background_n_occ",
"=",
"[",
"]",
"for",
"i",
",",
"mag",
"in",
"enumerate",
"(",
"mags",
")",
":",
"rate_idx",
"=",
"numpy",
".",
"where",
"(",
"sampler",
"[",
":",
",",
"i",
"]",
")",
"[",
"0",
"]",
"rate_cnt",
"=",
"sampler",
"[",
"rate_idx",
",",
"i",
"]",
"occurrence",
"=",
"rates",
"[",
"rate_idx",
",",
"i",
"]",
"locations",
"=",
"valid_locs",
"[",
"rate_idx",
",",
":",
"]",
"ruptures",
"=",
"generate_background_ruptures",
"(",
"tom",
",",
"locations",
",",
"occurrence",
",",
"mag",
",",
"npd",
",",
"hdd",
",",
"upper_seismogenic_depth",
",",
"lower_seismogenic_depth",
",",
"msr",
",",
"aspect",
",",
"trt",
")",
"background_ruptures",
".",
"extend",
"(",
"ruptures",
")",
"background_n_occ",
".",
"extend",
"(",
"rate_cnt",
".",
"tolist",
"(",
")",
")",
"return",
"background_ruptures",
",",
"background_n_occ"
] | Generates a rupture set from a sample of the background model
:param branch_key:
Key to indicate the branch for selecting the background model
:param tom:
Temporal occurrence model as instance of :class:
openquake.hazardlib.tom.TOM
:param seed:
Random seed to use in the call to tom.sample_number_of_occurrences
:param filter_idx:
Sites for consideration (can be None!)
:param float min_mag:
Minimim magnitude for consideration of background sources
:param npd:
Nodal plane distribution as instance of :class:
openquake.hazardlib.pmf.PMF
:param hdd:
Hypocentral depth distribution as instance of :class:
openquake.hazardlib.pmf.PMF
:param float aspect:
Aspect ratio
:param float upper_seismogenic_depth:
Upper seismogenic depth (km)
:param float lower_seismogenic_depth:
Lower seismogenic depth (km)
:param msr:
Magnitude scaling relation
:param float integration_distance:
Maximum distance from rupture to site for consideration | [
"Generates",
"a",
"rupture",
"set",
"from",
"a",
"sample",
"of",
"the",
"background",
"model"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/ucerf_event_based.py#L79-L136 | train | 233,056 |
gem/oq-engine | openquake/hazardlib/scalerel/wc1994.py | WC1994.get_median_area | def get_median_area(self, mag, rake):
"""
The values are a function of both magnitude and rake.
Setting the rake to ``None`` causes their "All" rupture-types
to be applied.
"""
assert rake is None or -180 <= rake <= 180
if rake is None:
# their "All" case
return 10.0 ** (-3.49 + 0.91 * mag)
elif (-45 <= rake <= 45) or (rake >= 135) or (rake <= -135):
# strike slip
return 10.0 ** (-3.42 + 0.90 * mag)
elif rake > 0:
# thrust/reverse
return 10.0 ** (-3.99 + 0.98 * mag)
else:
# normal
return 10.0 ** (-2.87 + 0.82 * mag) | python | def get_median_area(self, mag, rake):
"""
The values are a function of both magnitude and rake.
Setting the rake to ``None`` causes their "All" rupture-types
to be applied.
"""
assert rake is None or -180 <= rake <= 180
if rake is None:
# their "All" case
return 10.0 ** (-3.49 + 0.91 * mag)
elif (-45 <= rake <= 45) or (rake >= 135) or (rake <= -135):
# strike slip
return 10.0 ** (-3.42 + 0.90 * mag)
elif rake > 0:
# thrust/reverse
return 10.0 ** (-3.99 + 0.98 * mag)
else:
# normal
return 10.0 ** (-2.87 + 0.82 * mag) | [
"def",
"get_median_area",
"(",
"self",
",",
"mag",
",",
"rake",
")",
":",
"assert",
"rake",
"is",
"None",
"or",
"-",
"180",
"<=",
"rake",
"<=",
"180",
"if",
"rake",
"is",
"None",
":",
"# their \"All\" case",
"return",
"10.0",
"**",
"(",
"-",
"3.49",
"+",
"0.91",
"*",
"mag",
")",
"elif",
"(",
"-",
"45",
"<=",
"rake",
"<=",
"45",
")",
"or",
"(",
"rake",
">=",
"135",
")",
"or",
"(",
"rake",
"<=",
"-",
"135",
")",
":",
"# strike slip",
"return",
"10.0",
"**",
"(",
"-",
"3.42",
"+",
"0.90",
"*",
"mag",
")",
"elif",
"rake",
">",
"0",
":",
"# thrust/reverse",
"return",
"10.0",
"**",
"(",
"-",
"3.99",
"+",
"0.98",
"*",
"mag",
")",
"else",
":",
"# normal",
"return",
"10.0",
"**",
"(",
"-",
"2.87",
"+",
"0.82",
"*",
"mag",
")"
] | The values are a function of both magnitude and rake.
Setting the rake to ``None`` causes their "All" rupture-types
to be applied. | [
"The",
"values",
"are",
"a",
"function",
"of",
"both",
"magnitude",
"and",
"rake",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/scalerel/wc1994.py#L33-L52 | train | 233,057 |
gem/oq-engine | openquake/hazardlib/scalerel/wc1994.py | WC1994.get_std_dev_area | def get_std_dev_area(self, mag, rake):
"""
Standard deviation for WC1994. Magnitude is ignored.
"""
assert rake is None or -180 <= rake <= 180
if rake is None:
# their "All" case
return 0.24
elif (-45 <= rake <= 45) or (rake >= 135) or (rake <= -135):
# strike slip
return 0.22
elif rake > 0:
# thrust/reverse
return 0.26
else:
# normal
return 0.22 | python | def get_std_dev_area(self, mag, rake):
"""
Standard deviation for WC1994. Magnitude is ignored.
"""
assert rake is None or -180 <= rake <= 180
if rake is None:
# their "All" case
return 0.24
elif (-45 <= rake <= 45) or (rake >= 135) or (rake <= -135):
# strike slip
return 0.22
elif rake > 0:
# thrust/reverse
return 0.26
else:
# normal
return 0.22 | [
"def",
"get_std_dev_area",
"(",
"self",
",",
"mag",
",",
"rake",
")",
":",
"assert",
"rake",
"is",
"None",
"or",
"-",
"180",
"<=",
"rake",
"<=",
"180",
"if",
"rake",
"is",
"None",
":",
"# their \"All\" case",
"return",
"0.24",
"elif",
"(",
"-",
"45",
"<=",
"rake",
"<=",
"45",
")",
"or",
"(",
"rake",
">=",
"135",
")",
"or",
"(",
"rake",
"<=",
"-",
"135",
")",
":",
"# strike slip",
"return",
"0.22",
"elif",
"rake",
">",
"0",
":",
"# thrust/reverse",
"return",
"0.26",
"else",
":",
"# normal",
"return",
"0.22"
] | Standard deviation for WC1994. Magnitude is ignored. | [
"Standard",
"deviation",
"for",
"WC1994",
".",
"Magnitude",
"is",
"ignored",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/scalerel/wc1994.py#L54-L70 | train | 233,058 |
gem/oq-engine | openquake/hazardlib/scalerel/wc1994.py | WC1994.get_std_dev_mag | def get_std_dev_mag(self, rake):
"""
Standard deviation on the magnitude for the WC1994 area relation.
"""
assert rake is None or -180 <= rake <= 180
if rake is None:
# their "All" case
return 0.24
elif (-45 <= rake <= 45) or (rake >= 135) or (rake <= -135):
# strike slip
return 0.23
elif rake > 0:
# thrust/reverse
return 0.25
else:
# normal
return 0.25 | python | def get_std_dev_mag(self, rake):
"""
Standard deviation on the magnitude for the WC1994 area relation.
"""
assert rake is None or -180 <= rake <= 180
if rake is None:
# their "All" case
return 0.24
elif (-45 <= rake <= 45) or (rake >= 135) or (rake <= -135):
# strike slip
return 0.23
elif rake > 0:
# thrust/reverse
return 0.25
else:
# normal
return 0.25 | [
"def",
"get_std_dev_mag",
"(",
"self",
",",
"rake",
")",
":",
"assert",
"rake",
"is",
"None",
"or",
"-",
"180",
"<=",
"rake",
"<=",
"180",
"if",
"rake",
"is",
"None",
":",
"# their \"All\" case",
"return",
"0.24",
"elif",
"(",
"-",
"45",
"<=",
"rake",
"<=",
"45",
")",
"or",
"(",
"rake",
">=",
"135",
")",
"or",
"(",
"rake",
"<=",
"-",
"135",
")",
":",
"# strike slip",
"return",
"0.23",
"elif",
"rake",
">",
"0",
":",
"# thrust/reverse",
"return",
"0.25",
"else",
":",
"# normal",
"return",
"0.25"
] | Standard deviation on the magnitude for the WC1994 area relation. | [
"Standard",
"deviation",
"on",
"the",
"magnitude",
"for",
"the",
"WC1994",
"area",
"relation",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/scalerel/wc1994.py#L72-L88 | train | 233,059 |
gem/oq-engine | openquake/hazardlib/gsim/mgmpe/generic_gmpe_avgsa.py | GenericGmpeAvgSA.set_parameters | def set_parameters(self):
"""
Combines the parameters of the GMPE provided at the construction
level with the ones assigned to the average GMPE.
"""
for key in dir(self):
if key.startswith('REQUIRES_'):
setattr(self, key, getattr(self.gmpe, key))
if key.startswith('DEFINED_'):
if not key.endswith('FOR_INTENSITY_MEASURE_TYPES'):
setattr(self, key, getattr(self.gmpe, key)) | python | def set_parameters(self):
"""
Combines the parameters of the GMPE provided at the construction
level with the ones assigned to the average GMPE.
"""
for key in dir(self):
if key.startswith('REQUIRES_'):
setattr(self, key, getattr(self.gmpe, key))
if key.startswith('DEFINED_'):
if not key.endswith('FOR_INTENSITY_MEASURE_TYPES'):
setattr(self, key, getattr(self.gmpe, key)) | [
"def",
"set_parameters",
"(",
"self",
")",
":",
"for",
"key",
"in",
"dir",
"(",
"self",
")",
":",
"if",
"key",
".",
"startswith",
"(",
"'REQUIRES_'",
")",
":",
"setattr",
"(",
"self",
",",
"key",
",",
"getattr",
"(",
"self",
".",
"gmpe",
",",
"key",
")",
")",
"if",
"key",
".",
"startswith",
"(",
"'DEFINED_'",
")",
":",
"if",
"not",
"key",
".",
"endswith",
"(",
"'FOR_INTENSITY_MEASURE_TYPES'",
")",
":",
"setattr",
"(",
"self",
",",
"key",
",",
"getattr",
"(",
"self",
".",
"gmpe",
",",
"key",
")",
")"
] | Combines the parameters of the GMPE provided at the construction
level with the ones assigned to the average GMPE. | [
"Combines",
"the",
"parameters",
"of",
"the",
"GMPE",
"provided",
"at",
"the",
"construction",
"level",
"with",
"the",
"ones",
"assigned",
"to",
"the",
"average",
"GMPE",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/mgmpe/generic_gmpe_avgsa.py#L87-L97 | train | 233,060 |
gem/oq-engine | openquake/hazardlib/geo/mesh.py | Mesh.from_points_list | def from_points_list(cls, points):
"""
Create a mesh object from a collection of points.
:param point:
List of :class:`~openquake.hazardlib.geo.point.Point` objects.
:returns:
An instance of :class:`Mesh` with one-dimensional arrays
of coordinates from ``points``.
"""
lons = numpy.zeros(len(points), dtype=float)
lats = lons.copy()
depths = lons.copy()
for i in range(len(points)):
lons[i] = points[i].longitude
lats[i] = points[i].latitude
depths[i] = points[i].depth
if not depths.any():
# all points have zero depth, no need to waste memory
depths = None
return cls(lons, lats, depths) | python | def from_points_list(cls, points):
"""
Create a mesh object from a collection of points.
:param point:
List of :class:`~openquake.hazardlib.geo.point.Point` objects.
:returns:
An instance of :class:`Mesh` with one-dimensional arrays
of coordinates from ``points``.
"""
lons = numpy.zeros(len(points), dtype=float)
lats = lons.copy()
depths = lons.copy()
for i in range(len(points)):
lons[i] = points[i].longitude
lats[i] = points[i].latitude
depths[i] = points[i].depth
if not depths.any():
# all points have zero depth, no need to waste memory
depths = None
return cls(lons, lats, depths) | [
"def",
"from_points_list",
"(",
"cls",
",",
"points",
")",
":",
"lons",
"=",
"numpy",
".",
"zeros",
"(",
"len",
"(",
"points",
")",
",",
"dtype",
"=",
"float",
")",
"lats",
"=",
"lons",
".",
"copy",
"(",
")",
"depths",
"=",
"lons",
".",
"copy",
"(",
")",
"for",
"i",
"in",
"range",
"(",
"len",
"(",
"points",
")",
")",
":",
"lons",
"[",
"i",
"]",
"=",
"points",
"[",
"i",
"]",
".",
"longitude",
"lats",
"[",
"i",
"]",
"=",
"points",
"[",
"i",
"]",
".",
"latitude",
"depths",
"[",
"i",
"]",
"=",
"points",
"[",
"i",
"]",
".",
"depth",
"if",
"not",
"depths",
".",
"any",
"(",
")",
":",
"# all points have zero depth, no need to waste memory",
"depths",
"=",
"None",
"return",
"cls",
"(",
"lons",
",",
"lats",
",",
"depths",
")"
] | Create a mesh object from a collection of points.
:param point:
List of :class:`~openquake.hazardlib.geo.point.Point` objects.
:returns:
An instance of :class:`Mesh` with one-dimensional arrays
of coordinates from ``points``. | [
"Create",
"a",
"mesh",
"object",
"from",
"a",
"collection",
"of",
"points",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/mesh.py#L134-L154 | train | 233,061 |
gem/oq-engine | openquake/hazardlib/geo/mesh.py | Mesh.get_min_distance | def get_min_distance(self, mesh):
"""
Compute and return the minimum distance from the mesh to each point
in another mesh.
:returns:
numpy array of distances in km of shape (self.size, mesh.size)
Method doesn't make any assumptions on arrangement of the points
in either mesh and instead calculates the distance from each point of
this mesh to each point of the target mesh and returns the lowest found
for each.
"""
return cdist(self.xyz, mesh.xyz).min(axis=0) | python | def get_min_distance(self, mesh):
"""
Compute and return the minimum distance from the mesh to each point
in another mesh.
:returns:
numpy array of distances in km of shape (self.size, mesh.size)
Method doesn't make any assumptions on arrangement of the points
in either mesh and instead calculates the distance from each point of
this mesh to each point of the target mesh and returns the lowest found
for each.
"""
return cdist(self.xyz, mesh.xyz).min(axis=0) | [
"def",
"get_min_distance",
"(",
"self",
",",
"mesh",
")",
":",
"return",
"cdist",
"(",
"self",
".",
"xyz",
",",
"mesh",
".",
"xyz",
")",
".",
"min",
"(",
"axis",
"=",
"0",
")"
] | Compute and return the minimum distance from the mesh to each point
in another mesh.
:returns:
numpy array of distances in km of shape (self.size, mesh.size)
Method doesn't make any assumptions on arrangement of the points
in either mesh and instead calculates the distance from each point of
this mesh to each point of the target mesh and returns the lowest found
for each. | [
"Compute",
"and",
"return",
"the",
"minimum",
"distance",
"from",
"the",
"mesh",
"to",
"each",
"point",
"in",
"another",
"mesh",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/mesh.py#L236-L249 | train | 233,062 |
gem/oq-engine | openquake/hazardlib/geo/mesh.py | Mesh.get_closest_points | def get_closest_points(self, mesh):
"""
Find closest point of this mesh for each point in the other mesh
:returns:
:class:`Mesh` object of the same shape as `mesh` with closest
points from this one at respective indices.
"""
min_idx = cdist(self.xyz, mesh.xyz).argmin(axis=0) # lose shape
if hasattr(mesh, 'shape'):
min_idx = min_idx.reshape(mesh.shape)
lons = self.lons.take(min_idx)
lats = self.lats.take(min_idx)
deps = self.depths.take(min_idx)
return Mesh(lons, lats, deps) | python | def get_closest_points(self, mesh):
"""
Find closest point of this mesh for each point in the other mesh
:returns:
:class:`Mesh` object of the same shape as `mesh` with closest
points from this one at respective indices.
"""
min_idx = cdist(self.xyz, mesh.xyz).argmin(axis=0) # lose shape
if hasattr(mesh, 'shape'):
min_idx = min_idx.reshape(mesh.shape)
lons = self.lons.take(min_idx)
lats = self.lats.take(min_idx)
deps = self.depths.take(min_idx)
return Mesh(lons, lats, deps) | [
"def",
"get_closest_points",
"(",
"self",
",",
"mesh",
")",
":",
"min_idx",
"=",
"cdist",
"(",
"self",
".",
"xyz",
",",
"mesh",
".",
"xyz",
")",
".",
"argmin",
"(",
"axis",
"=",
"0",
")",
"# lose shape",
"if",
"hasattr",
"(",
"mesh",
",",
"'shape'",
")",
":",
"min_idx",
"=",
"min_idx",
".",
"reshape",
"(",
"mesh",
".",
"shape",
")",
"lons",
"=",
"self",
".",
"lons",
".",
"take",
"(",
"min_idx",
")",
"lats",
"=",
"self",
".",
"lats",
".",
"take",
"(",
"min_idx",
")",
"deps",
"=",
"self",
".",
"depths",
".",
"take",
"(",
"min_idx",
")",
"return",
"Mesh",
"(",
"lons",
",",
"lats",
",",
"deps",
")"
] | Find closest point of this mesh for each point in the other mesh
:returns:
:class:`Mesh` object of the same shape as `mesh` with closest
points from this one at respective indices. | [
"Find",
"closest",
"point",
"of",
"this",
"mesh",
"for",
"each",
"point",
"in",
"the",
"other",
"mesh"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/mesh.py#L251-L265 | train | 233,063 |
gem/oq-engine | openquake/hazardlib/geo/mesh.py | Mesh.get_distance_matrix | def get_distance_matrix(self):
"""
Compute and return distances between each pairs of points in the mesh.
This method requires that the coordinate arrays are one-dimensional.
NB: the depth of the points is ignored
.. warning::
Because of its quadratic space and time complexity this method
is safe to use for meshes of up to several thousand points. For
mesh of 10k points it needs ~800 Mb for just the resulting matrix
and four times that much for intermediate storage.
:returns:
Two-dimensional numpy array, square matrix of distances. The matrix
has zeros on main diagonal and positive distances in kilometers
on all other cells. That is, value in cell (3, 5) is the distance
between mesh's points 3 and 5 in km, and it is equal to value
in cell (5, 3).
Uses :func:`openquake.hazardlib.geo.geodetic.geodetic_distance`.
"""
assert self.lons.ndim == 1
distances = geodetic.geodetic_distance(
self.lons.reshape(self.lons.shape + (1, )),
self.lats.reshape(self.lats.shape + (1, )),
self.lons,
self.lats)
return numpy.matrix(distances, copy=False) | python | def get_distance_matrix(self):
"""
Compute and return distances between each pairs of points in the mesh.
This method requires that the coordinate arrays are one-dimensional.
NB: the depth of the points is ignored
.. warning::
Because of its quadratic space and time complexity this method
is safe to use for meshes of up to several thousand points. For
mesh of 10k points it needs ~800 Mb for just the resulting matrix
and four times that much for intermediate storage.
:returns:
Two-dimensional numpy array, square matrix of distances. The matrix
has zeros on main diagonal and positive distances in kilometers
on all other cells. That is, value in cell (3, 5) is the distance
between mesh's points 3 and 5 in km, and it is equal to value
in cell (5, 3).
Uses :func:`openquake.hazardlib.geo.geodetic.geodetic_distance`.
"""
assert self.lons.ndim == 1
distances = geodetic.geodetic_distance(
self.lons.reshape(self.lons.shape + (1, )),
self.lats.reshape(self.lats.shape + (1, )),
self.lons,
self.lats)
return numpy.matrix(distances, copy=False) | [
"def",
"get_distance_matrix",
"(",
"self",
")",
":",
"assert",
"self",
".",
"lons",
".",
"ndim",
"==",
"1",
"distances",
"=",
"geodetic",
".",
"geodetic_distance",
"(",
"self",
".",
"lons",
".",
"reshape",
"(",
"self",
".",
"lons",
".",
"shape",
"+",
"(",
"1",
",",
")",
")",
",",
"self",
".",
"lats",
".",
"reshape",
"(",
"self",
".",
"lats",
".",
"shape",
"+",
"(",
"1",
",",
")",
")",
",",
"self",
".",
"lons",
",",
"self",
".",
"lats",
")",
"return",
"numpy",
".",
"matrix",
"(",
"distances",
",",
"copy",
"=",
"False",
")"
] | Compute and return distances between each pairs of points in the mesh.
This method requires that the coordinate arrays are one-dimensional.
NB: the depth of the points is ignored
.. warning::
Because of its quadratic space and time complexity this method
is safe to use for meshes of up to several thousand points. For
mesh of 10k points it needs ~800 Mb for just the resulting matrix
and four times that much for intermediate storage.
:returns:
Two-dimensional numpy array, square matrix of distances. The matrix
has zeros on main diagonal and positive distances in kilometers
on all other cells. That is, value in cell (3, 5) is the distance
between mesh's points 3 and 5 in km, and it is equal to value
in cell (5, 3).
Uses :func:`openquake.hazardlib.geo.geodetic.geodetic_distance`. | [
"Compute",
"and",
"return",
"distances",
"between",
"each",
"pairs",
"of",
"points",
"in",
"the",
"mesh",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/mesh.py#L267-L295 | train | 233,064 |
gem/oq-engine | openquake/hazardlib/geo/mesh.py | Mesh._get_proj_convex_hull | def _get_proj_convex_hull(self):
"""
Create a projection centered in the center of this mesh and define
a convex polygon in that projection, enveloping all the points
of the mesh.
:returns:
Tuple of two items: projection function and shapely 2d polygon.
Note that the result geometry can be line or point depending
on number of points in the mesh and their arrangement.
"""
# create a projection centered in the center of points collection
proj = geo_utils.OrthographicProjection(
*geo_utils.get_spherical_bounding_box(self.lons, self.lats))
# project all the points and create a shapely multipoint object.
# need to copy an array because otherwise shapely misinterprets it
coords = numpy.transpose(proj(self.lons.flat, self.lats.flat)).copy()
multipoint = shapely.geometry.MultiPoint(coords)
# create a 2d polygon from a convex hull around that multipoint
return proj, multipoint.convex_hull | python | def _get_proj_convex_hull(self):
"""
Create a projection centered in the center of this mesh and define
a convex polygon in that projection, enveloping all the points
of the mesh.
:returns:
Tuple of two items: projection function and shapely 2d polygon.
Note that the result geometry can be line or point depending
on number of points in the mesh and their arrangement.
"""
# create a projection centered in the center of points collection
proj = geo_utils.OrthographicProjection(
*geo_utils.get_spherical_bounding_box(self.lons, self.lats))
# project all the points and create a shapely multipoint object.
# need to copy an array because otherwise shapely misinterprets it
coords = numpy.transpose(proj(self.lons.flat, self.lats.flat)).copy()
multipoint = shapely.geometry.MultiPoint(coords)
# create a 2d polygon from a convex hull around that multipoint
return proj, multipoint.convex_hull | [
"def",
"_get_proj_convex_hull",
"(",
"self",
")",
":",
"# create a projection centered in the center of points collection",
"proj",
"=",
"geo_utils",
".",
"OrthographicProjection",
"(",
"*",
"geo_utils",
".",
"get_spherical_bounding_box",
"(",
"self",
".",
"lons",
",",
"self",
".",
"lats",
")",
")",
"# project all the points and create a shapely multipoint object.",
"# need to copy an array because otherwise shapely misinterprets it",
"coords",
"=",
"numpy",
".",
"transpose",
"(",
"proj",
"(",
"self",
".",
"lons",
".",
"flat",
",",
"self",
".",
"lats",
".",
"flat",
")",
")",
".",
"copy",
"(",
")",
"multipoint",
"=",
"shapely",
".",
"geometry",
".",
"MultiPoint",
"(",
"coords",
")",
"# create a 2d polygon from a convex hull around that multipoint",
"return",
"proj",
",",
"multipoint",
".",
"convex_hull"
] | Create a projection centered in the center of this mesh and define
a convex polygon in that projection, enveloping all the points
of the mesh.
:returns:
Tuple of two items: projection function and shapely 2d polygon.
Note that the result geometry can be line or point depending
on number of points in the mesh and their arrangement. | [
"Create",
"a",
"projection",
"centered",
"in",
"the",
"center",
"of",
"this",
"mesh",
"and",
"define",
"a",
"convex",
"polygon",
"in",
"that",
"projection",
"enveloping",
"all",
"the",
"points",
"of",
"the",
"mesh",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/mesh.py#L297-L317 | train | 233,065 |
gem/oq-engine | openquake/hazardlib/geo/mesh.py | Mesh.get_joyner_boore_distance | def get_joyner_boore_distance(self, mesh):
"""
Compute and return Joyner-Boore distance to each point of ``mesh``.
Point's depth is ignored.
See
:meth:`openquake.hazardlib.geo.surface.base.BaseSurface.get_joyner_boore_distance`
for definition of this distance.
:returns:
numpy array of distances in km of the same shape as ``mesh``.
Distance value is considered to be zero if a point
lies inside the polygon enveloping the projection of the mesh
or on one of its edges.
"""
# we perform a hybrid calculation (geodetic mesh-to-mesh distance
# and distance on the projection plane for close points). first,
# we find the closest geodetic distance for each point of target
# mesh to this one. in general that distance is greater than
# the exact distance to enclosing polygon of this mesh and it
# depends on mesh spacing. but the difference can be neglected
# if calculated geodetic distance is over some threshold.
# get the highest slice from the 3D mesh
distances = geodetic.min_geodetic_distance(
(self.lons, self.lats), (mesh.lons, mesh.lats))
# here we find the points for which calculated mesh-to-mesh
# distance is below a threshold. this threshold is arbitrary:
# lower values increase the maximum possible error, higher
# values reduce the efficiency of that filtering. the maximum
# error is equal to the maximum difference between a distance
# from site to two adjacent points of the mesh and distance
# from site to the line connecting them. thus the error is
# a function of distance threshold and mesh spacing. the error
# is maximum when the site lies on a perpendicular to the line
# connecting points of the mesh and that passes the middle
# point between them. the error then can be calculated as
# ``err = trsh - d = trsh - \sqrt(trsh^2 - (ms/2)^2)``, where
# ``trsh`` and ``d`` are distance to mesh points (the one
# we found on the previous step) and distance to the line
# connecting them (the actual distance) and ``ms`` is mesh
# spacing. the threshold of 40 km gives maximum error of 314
# meters for meshes with spacing of 10 km and 5.36 km for
# meshes with spacing of 40 km. if mesh spacing is over
# ``(trsh / \sqrt(2)) * 2`` then points lying in the middle
# of mesh cells (that is inside the polygon) will be filtered
# out by the threshold and have positive distance instead of 0.
# so for threshold of 40 km mesh spacing should not be more
# than 56 km (typical values are 5 to 10 km).
idxs = (distances < 40).nonzero()[0] # indices on the first dimension
if not len(idxs):
# no point is close enough, return distances as they are
return distances
# for all the points that are closer than the threshold we need
# to recalculate the distance and set it to zero, if point falls
# inside the enclosing polygon of the mesh. for doing that we
# project both this mesh and the points of the second mesh--selected
# by distance threshold--to the same Cartesian space, define
# minimum shapely polygon enclosing the mesh and calculate point
# to polygon distance, which gives the most accurate value
# of distance in km (and that value is zero for points inside
# the polygon).
proj, polygon = self._get_proj_enclosing_polygon()
if not isinstance(polygon, shapely.geometry.Polygon):
# either line or point is our enclosing polygon. draw
# a square with side of 10 m around in order to have
# a proper polygon instead.
polygon = polygon.buffer(self.DIST_TOLERANCE, 1)
mesh_xx, mesh_yy = proj(mesh.lons[idxs], mesh.lats[idxs])
# replace geodetic distance values for points-closer-than-the-threshold
# by more accurate point-to-polygon distance values.
distances[idxs] = geo_utils.point_to_polygon_distance(
polygon, mesh_xx, mesh_yy)
def get_joyner_boore_distance(self, mesh):
    """
    Compute and return Joyner-Boore distance to each point of ``mesh``.
    Point's depth is ignored.

    See
    :meth:`openquake.hazardlib.geo.surface.base.BaseSurface.get_joyner_boore_distance`
    for definition of this distance.

    :returns:
        numpy array of distances in km of the same shape as ``mesh``.
        Distance value is considered to be zero if a point
        lies inside the polygon enveloping the projection of the mesh
        or on one of its edges.
    """
    # Hybrid scheme: start from the cheap geodetic mesh-to-mesh distance
    # for every target point. That value is an overestimate of the true
    # distance to the enclosing polygon of this mesh (it depends on the
    # mesh spacing), but the discrepancy is negligible once the point is
    # far enough away.
    dists = geodetic.min_geodetic_distance(
        (self.lons, self.lats), (mesh.lons, mesh.lats))
    # Refine only the points closer than a 40 km threshold. The worst-case
    # error of keeping the geodetic value is
    # ``err = trsh - sqrt(trsh^2 - (ms/2)^2)`` (``trsh`` = threshold,
    # ``ms`` = mesh spacing): ~314 m for 10 km spacing, ~5.36 km for
    # 40 km spacing. Spacing must stay below ``(trsh / sqrt(2)) * 2``
    # (~56 km here) or interior points would be filtered out and get a
    # positive distance instead of zero; typical spacings are 5-10 km.
    close = (dists < 40).nonzero()[0]
    if not len(close):
        # nothing within the threshold: the geodetic values are final
        return dists
    # For the nearby points, project them together with this mesh onto
    # a common Cartesian plane, build the minimum shapely polygon that
    # encloses the mesh and take the exact point-to-polygon distance
    # (zero for points inside the polygon).
    proj, polygon = self._get_proj_enclosing_polygon()
    if not isinstance(polygon, shapely.geometry.Polygon):
        # degenerate hull (a point or a line): inflate it by ~10 m so
        # we operate on a proper areal polygon
        polygon = polygon.buffer(self.DIST_TOLERANCE, 1)
    xx, yy = proj(mesh.lons[close], mesh.lats[close])
    # overwrite the coarse geodetic values with the accurate ones
    dists[close] = geo_utils.point_to_polygon_distance(polygon, xx, yy)
    return dists
"def",
"get_joyner_boore_distance",
"(",
"self",
",",
"mesh",
")",
":",
"# we perform a hybrid calculation (geodetic mesh-to-mesh distance",
"# and distance on the projection plane for close points). first,",
"# we find the closest geodetic distance for each point of target",
"# mesh to this one. in general that distance is greater than",
"# the exact distance to enclosing polygon of this mesh and it",
"# depends on mesh spacing. but the difference can be neglected",
"# if calculated geodetic distance is over some threshold.",
"# get the highest slice from the 3D mesh",
"distances",
"=",
"geodetic",
".",
"min_geodetic_distance",
"(",
"(",
"self",
".",
"lons",
",",
"self",
".",
"lats",
")",
",",
"(",
"mesh",
".",
"lons",
",",
"mesh",
".",
"lats",
")",
")",
"# here we find the points for which calculated mesh-to-mesh",
"# distance is below a threshold. this threshold is arbitrary:",
"# lower values increase the maximum possible error, higher",
"# values reduce the efficiency of that filtering. the maximum",
"# error is equal to the maximum difference between a distance",
"# from site to two adjacent points of the mesh and distance",
"# from site to the line connecting them. thus the error is",
"# a function of distance threshold and mesh spacing. the error",
"# is maximum when the site lies on a perpendicular to the line",
"# connecting points of the mesh and that passes the middle",
"# point between them. the error then can be calculated as",
"# ``err = trsh - d = trsh - \\sqrt(trsh^2 - (ms/2)^2)``, where",
"# ``trsh`` and ``d`` are distance to mesh points (the one",
"# we found on the previous step) and distance to the line",
"# connecting them (the actual distance) and ``ms`` is mesh",
"# spacing. the threshold of 40 km gives maximum error of 314",
"# meters for meshes with spacing of 10 km and 5.36 km for",
"# meshes with spacing of 40 km. if mesh spacing is over",
"# ``(trsh / \\sqrt(2)) * 2`` then points lying in the middle",
"# of mesh cells (that is inside the polygon) will be filtered",
"# out by the threshold and have positive distance instead of 0.",
"# so for threshold of 40 km mesh spacing should not be more",
"# than 56 km (typical values are 5 to 10 km).",
"idxs",
"=",
"(",
"distances",
"<",
"40",
")",
".",
"nonzero",
"(",
")",
"[",
"0",
"]",
"# indices on the first dimension",
"if",
"not",
"len",
"(",
"idxs",
")",
":",
"# no point is close enough, return distances as they are",
"return",
"distances",
"# for all the points that are closer than the threshold we need",
"# to recalculate the distance and set it to zero, if point falls",
"# inside the enclosing polygon of the mesh. for doing that we",
"# project both this mesh and the points of the second mesh--selected",
"# by distance threshold--to the same Cartesian space, define",
"# minimum shapely polygon enclosing the mesh and calculate point",
"# to polygon distance, which gives the most accurate value",
"# of distance in km (and that value is zero for points inside",
"# the polygon).",
"proj",
",",
"polygon",
"=",
"self",
".",
"_get_proj_enclosing_polygon",
"(",
")",
"if",
"not",
"isinstance",
"(",
"polygon",
",",
"shapely",
".",
"geometry",
".",
"Polygon",
")",
":",
"# either line or point is our enclosing polygon. draw",
"# a square with side of 10 m around in order to have",
"# a proper polygon instead.",
"polygon",
"=",
"polygon",
".",
"buffer",
"(",
"self",
".",
"DIST_TOLERANCE",
",",
"1",
")",
"mesh_xx",
",",
"mesh_yy",
"=",
"proj",
"(",
"mesh",
".",
"lons",
"[",
"idxs",
"]",
",",
"mesh",
".",
"lats",
"[",
"idxs",
"]",
")",
"# replace geodetic distance values for points-closer-than-the-threshold",
"# by more accurate point-to-polygon distance values.",
"distances",
"[",
"idxs",
"]",
"=",
"geo_utils",
".",
"point_to_polygon_distance",
"(",
"polygon",
",",
"mesh_xx",
",",
"mesh_yy",
")",
"return",
"distances"
] | Compute and return Joyner-Boore distance to each point of ``mesh``.
Point's depth is ignored.
See
:meth:`openquake.hazardlib.geo.surface.base.BaseSurface.get_joyner_boore_distance`
for definition of this distance.
:returns:
numpy array of distances in km of the same shape as ``mesh``.
Distance value is considered to be zero if a point
lies inside the polygon enveloping the projection of the mesh
or on one of its edges. | [
"Compute",
"and",
"return",
"Joyner",
"-",
"Boore",
"distance",
"to",
"each",
"point",
"of",
"mesh",
".",
"Point",
"s",
"depth",
"is",
"ignored",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/mesh.py#L319-L393 | train | 233,066 |
gem/oq-engine | openquake/hazardlib/geo/mesh.py | Mesh.get_convex_hull | def get_convex_hull(self):
"""
Get a convex polygon object that contains projections of all the points
of the mesh.
:returns:
Instance of :class:`openquake.hazardlib.geo.polygon.Polygon` that
is a convex hull around all the points in this mesh. If the
original mesh had only one point, the resulting polygon has a
square shape with a side length of 10 meters. If there were only
two points, resulting polygon is a stripe 10 meters wide.
"""
proj, polygon2d = self._get_proj_convex_hull()
# if mesh had only one point, the convex hull is a point. if there
# were two, it is a line string. we need to return a convex polygon
# object, so extend that area-less geometries by some arbitrarily
# small distance.
if isinstance(polygon2d, (shapely.geometry.LineString,
shapely.geometry.Point)):
polygon2d = polygon2d.buffer(self.DIST_TOLERANCE, 1)
# avoid circular imports
from openquake.hazardlib.geo.polygon import Polygon
def get_convex_hull(self):
    """
    Get a convex polygon object that contains projections of all the points
    of the mesh.

    :returns:
        Instance of :class:`openquake.hazardlib.geo.polygon.Polygon` that
        is a convex hull around all the points in this mesh. If the
        original mesh had only one point, the resulting polygon has a
        square shape with a side length of 10 meters. If there were only
        two points, resulting polygon is a stripe 10 meters wide.
    """
    proj, hull = self._get_proj_convex_hull()
    # a mesh of one or two points yields a degenerate (area-less) hull:
    # a point or a line string. Inflate it by ~10 m so the caller always
    # receives a proper areal polygon.
    degenerate = (shapely.geometry.LineString, shapely.geometry.Point)
    if isinstance(hull, degenerate):
        hull = hull.buffer(self.DIST_TOLERANCE, 1)
    # local import to avoid a circular dependency with the polygon module
    from openquake.hazardlib.geo.polygon import Polygon
    return Polygon._from_2d(hull, proj)
"def",
"get_convex_hull",
"(",
"self",
")",
":",
"proj",
",",
"polygon2d",
"=",
"self",
".",
"_get_proj_convex_hull",
"(",
")",
"# if mesh had only one point, the convex hull is a point. if there",
"# were two, it is a line string. we need to return a convex polygon",
"# object, so extend that area-less geometries by some arbitrarily",
"# small distance.",
"if",
"isinstance",
"(",
"polygon2d",
",",
"(",
"shapely",
".",
"geometry",
".",
"LineString",
",",
"shapely",
".",
"geometry",
".",
"Point",
")",
")",
":",
"polygon2d",
"=",
"polygon2d",
".",
"buffer",
"(",
"self",
".",
"DIST_TOLERANCE",
",",
"1",
")",
"# avoid circular imports",
"from",
"openquake",
".",
"hazardlib",
".",
"geo",
".",
"polygon",
"import",
"Polygon",
"return",
"Polygon",
".",
"_from_2d",
"(",
"polygon2d",
",",
"proj",
")"
] | Get a convex polygon object that contains projections of all the points
of the mesh.
:returns:
Instance of :class:`openquake.hazardlib.geo.polygon.Polygon` that
is a convex hull around all the points in this mesh. If the
original mesh had only one point, the resulting polygon has a
square shape with a side length of 10 meters. If there were only
two points, resulting polygon is a stripe 10 meters wide. | [
"Get",
"a",
"convex",
"polygon",
"object",
"that",
"contains",
"projections",
"of",
"all",
"the",
"points",
"of",
"the",
"mesh",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/mesh.py#L457-L480 | train | 233,067 |
gem/oq-engine | openquake/hazardlib/geo/mesh.py | RectangularMesh.from_points_list | def from_points_list(cls, points):
"""
Create a rectangular mesh object from a list of lists of points.
Lists in a list are supposed to have the same length.
:param point:
List of lists of :class:`~openquake.hazardlib.geo.point.Point`
objects.
"""
assert points is not None and len(points) > 0 and len(points[0]) > 0, \
'list of at least one non-empty list of points is required'
lons = numpy.zeros((len(points), len(points[0])), dtype=float)
lats = lons.copy()
depths = lons.copy()
num_cols = len(points[0])
for i, row in enumerate(points):
assert len(row) == num_cols, \
'lists of points are not of uniform length'
for j, point in enumerate(row):
lons[i, j] = point.longitude
lats[i, j] = point.latitude
depths[i, j] = point.depth
if not depths.any():
depths = None
def from_points_list(cls, points):
    """
    Create a rectangular mesh object from a list of lists of points.
    Lists in a list are supposed to have the same length.

    :param point:
        List of lists of :class:`~openquake.hazardlib.geo.point.Point`
        objects.
    """
    assert points is not None and len(points) > 0 and len(points[0]) > 0, \
        'list of at least one non-empty list of points is required'
    n_cols = len(points[0])
    shape = (len(points), n_cols)
    # three parallel coordinate grids, one value per mesh vertex
    lons = numpy.zeros(shape, dtype=float)
    lats = numpy.zeros(shape, dtype=float)
    depths = numpy.zeros(shape, dtype=float)
    for row_idx, row in enumerate(points):
        assert len(row) == n_cols, \
            'lists of points are not of uniform length'
        for col_idx, pnt in enumerate(row):
            lons[row_idx, col_idx] = pnt.longitude
            lats[row_idx, col_idx] = pnt.latitude
            depths[row_idx, col_idx] = pnt.depth
    if not depths.any():
        # an all-zero depth grid marks a surface mesh: store no depths
        depths = None
    return cls(lons, lats, depths)
"def",
"from_points_list",
"(",
"cls",
",",
"points",
")",
":",
"assert",
"points",
"is",
"not",
"None",
"and",
"len",
"(",
"points",
")",
">",
"0",
"and",
"len",
"(",
"points",
"[",
"0",
"]",
")",
">",
"0",
",",
"'list of at least one non-empty list of points is required'",
"lons",
"=",
"numpy",
".",
"zeros",
"(",
"(",
"len",
"(",
"points",
")",
",",
"len",
"(",
"points",
"[",
"0",
"]",
")",
")",
",",
"dtype",
"=",
"float",
")",
"lats",
"=",
"lons",
".",
"copy",
"(",
")",
"depths",
"=",
"lons",
".",
"copy",
"(",
")",
"num_cols",
"=",
"len",
"(",
"points",
"[",
"0",
"]",
")",
"for",
"i",
",",
"row",
"in",
"enumerate",
"(",
"points",
")",
":",
"assert",
"len",
"(",
"row",
")",
"==",
"num_cols",
",",
"'lists of points are not of uniform length'",
"for",
"j",
",",
"point",
"in",
"enumerate",
"(",
"row",
")",
":",
"lons",
"[",
"i",
",",
"j",
"]",
"=",
"point",
".",
"longitude",
"lats",
"[",
"i",
",",
"j",
"]",
"=",
"point",
".",
"latitude",
"depths",
"[",
"i",
",",
"j",
"]",
"=",
"point",
".",
"depth",
"if",
"not",
"depths",
".",
"any",
"(",
")",
":",
"depths",
"=",
"None",
"return",
"cls",
"(",
"lons",
",",
"lats",
",",
"depths",
")"
] | Create a rectangular mesh object from a list of lists of points.
Lists in a list are supposed to have the same length.
:param point:
List of lists of :class:`~openquake.hazardlib.geo.point.Point`
objects. | [
"Create",
"a",
"rectangular",
"mesh",
"object",
"from",
"a",
"list",
"of",
"lists",
"of",
"points",
".",
"Lists",
"in",
"a",
"list",
"are",
"supposed",
"to",
"have",
"the",
"same",
"length",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/mesh.py#L497-L521 | train | 233,068 |
gem/oq-engine | openquake/hazardlib/geo/mesh.py | RectangularMesh.get_middle_point | def get_middle_point(self):
"""
Return the middle point of the mesh.
:returns:
An instance of :class:`~openquake.hazardlib.geo.point.Point`.
The middle point is taken from the middle row and a middle column
of the mesh if there are odd number of both. Otherwise the geometric
mean point of two or four middle points.
"""
num_rows, num_cols = self.lons.shape
mid_row = num_rows // 2
depth = 0
if num_rows & 1 == 1:
# there are odd number of rows
mid_col = num_cols // 2
if num_cols & 1 == 1:
# odd number of columns, we can easily take
# the middle point
depth = self.depths[mid_row, mid_col]
return Point(self.lons[mid_row, mid_col],
self.lats[mid_row, mid_col], depth)
else:
# even number of columns, need to take two middle
# points on the middle row
lon1, lon2 = self.lons[mid_row, mid_col - 1: mid_col + 1]
lat1, lat2 = self.lats[mid_row, mid_col - 1: mid_col + 1]
depth1 = self.depths[mid_row, mid_col - 1]
depth2 = self.depths[mid_row, mid_col]
else:
# there are even number of rows. take the row just above
# and the one just below the middle and find middle point
# of each
submesh1 = self[mid_row - 1: mid_row]
submesh2 = self[mid_row: mid_row + 1]
p1, p2 = submesh1.get_middle_point(), submesh2.get_middle_point()
lon1, lat1, depth1 = p1.longitude, p1.latitude, p1.depth
lon2, lat2, depth2 = p2.longitude, p2.latitude, p2.depth
# we need to find the middle between two points
depth = (depth1 + depth2) / 2.0
lon, lat = geo_utils.get_middle_point(lon1, lat1, lon2, lat2)
def get_middle_point(self):
    """
    Return the middle point of the mesh.

    :returns:
        An instance of :class:`~openquake.hazardlib.geo.point.Point`.

    The middle point is taken from the middle row and a middle column
    of the mesh if there are odd number of both. Otherwise the geometric
    mean point of two or four middle points.
    """
    num_rows, num_cols = self.lons.shape
    mid_row = num_rows // 2
    depth = 0
    if num_rows & 1 == 1:
        # there are odd number of rows (parity via bitwise AND)
        mid_col = num_cols // 2
        if num_cols & 1 == 1:
            # odd number of columns, we can easily take
            # the middle point
            depth = self.depths[mid_row, mid_col]
            return Point(self.lons[mid_row, mid_col],
                         self.lats[mid_row, mid_col], depth)
        else:
            # even number of columns, need to take two middle
            # points on the middle row and average them below
            lon1, lon2 = self.lons[mid_row, mid_col - 1: mid_col + 1]
            lat1, lat2 = self.lats[mid_row, mid_col - 1: mid_col + 1]
            depth1 = self.depths[mid_row, mid_col - 1]
            depth2 = self.depths[mid_row, mid_col]
    else:
        # there are even number of rows. take the row just above
        # and the one just below the middle and find middle point
        # of each (recursion terminates: each submesh is a single row,
        # so it hits the odd-rows branch above)
        # NOTE(review): assumes self[row slice] yields a mesh of the
        # same class with lons/lats/depths sliced -- confirm __getitem__
        submesh1 = self[mid_row - 1: mid_row]
        submesh2 = self[mid_row: mid_row + 1]
        p1, p2 = submesh1.get_middle_point(), submesh2.get_middle_point()
        lon1, lat1, depth1 = p1.longitude, p1.latitude, p1.depth
        lon2, lat2, depth2 = p2.longitude, p2.latitude, p2.depth
    # we need to find the middle between two points: depth is a plain
    # average, lon/lat come from the geodetic midpoint helper
    depth = (depth1 + depth2) / 2.0
    lon, lat = geo_utils.get_middle_point(lon1, lat1, lon2, lat2)
    return Point(lon, lat, depth)
"def",
"get_middle_point",
"(",
"self",
")",
":",
"num_rows",
",",
"num_cols",
"=",
"self",
".",
"lons",
".",
"shape",
"mid_row",
"=",
"num_rows",
"//",
"2",
"depth",
"=",
"0",
"if",
"num_rows",
"&",
"1",
"==",
"1",
":",
"# there are odd number of rows",
"mid_col",
"=",
"num_cols",
"//",
"2",
"if",
"num_cols",
"&",
"1",
"==",
"1",
":",
"# odd number of columns, we can easily take",
"# the middle point",
"depth",
"=",
"self",
".",
"depths",
"[",
"mid_row",
",",
"mid_col",
"]",
"return",
"Point",
"(",
"self",
".",
"lons",
"[",
"mid_row",
",",
"mid_col",
"]",
",",
"self",
".",
"lats",
"[",
"mid_row",
",",
"mid_col",
"]",
",",
"depth",
")",
"else",
":",
"# even number of columns, need to take two middle",
"# points on the middle row",
"lon1",
",",
"lon2",
"=",
"self",
".",
"lons",
"[",
"mid_row",
",",
"mid_col",
"-",
"1",
":",
"mid_col",
"+",
"1",
"]",
"lat1",
",",
"lat2",
"=",
"self",
".",
"lats",
"[",
"mid_row",
",",
"mid_col",
"-",
"1",
":",
"mid_col",
"+",
"1",
"]",
"depth1",
"=",
"self",
".",
"depths",
"[",
"mid_row",
",",
"mid_col",
"-",
"1",
"]",
"depth2",
"=",
"self",
".",
"depths",
"[",
"mid_row",
",",
"mid_col",
"]",
"else",
":",
"# there are even number of rows. take the row just above",
"# and the one just below the middle and find middle point",
"# of each",
"submesh1",
"=",
"self",
"[",
"mid_row",
"-",
"1",
":",
"mid_row",
"]",
"submesh2",
"=",
"self",
"[",
"mid_row",
":",
"mid_row",
"+",
"1",
"]",
"p1",
",",
"p2",
"=",
"submesh1",
".",
"get_middle_point",
"(",
")",
",",
"submesh2",
".",
"get_middle_point",
"(",
")",
"lon1",
",",
"lat1",
",",
"depth1",
"=",
"p1",
".",
"longitude",
",",
"p1",
".",
"latitude",
",",
"p1",
".",
"depth",
"lon2",
",",
"lat2",
",",
"depth2",
"=",
"p2",
".",
"longitude",
",",
"p2",
".",
"latitude",
",",
"p2",
".",
"depth",
"# we need to find the middle between two points",
"depth",
"=",
"(",
"depth1",
"+",
"depth2",
")",
"/",
"2.0",
"lon",
",",
"lat",
"=",
"geo_utils",
".",
"get_middle_point",
"(",
"lon1",
",",
"lat1",
",",
"lon2",
",",
"lat2",
")",
"return",
"Point",
"(",
"lon",
",",
"lat",
",",
"depth",
")"
] | Return the middle point of the mesh.
:returns:
An instance of :class:`~openquake.hazardlib.geo.point.Point`.
The middle point is taken from the middle row and a middle column
of the mesh if there are odd number of both. Otherwise the geometric
mean point of two or four middle points. | [
"Return",
"the",
"middle",
"point",
"of",
"the",
"mesh",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/mesh.py#L523-L566 | train | 233,069 |
gem/oq-engine | openquake/hazardlib/geo/mesh.py | RectangularMesh.get_cell_dimensions | def get_cell_dimensions(self):
"""
Calculate centroid, width, length and area of each mesh cell.
:returns:
Tuple of four elements, each being 2d numpy array.
Each array has both dimensions less by one the dimensions
of the mesh, since they represent cells, not vertices.
Arrays contain the following cell information:
#. centroids, 3d vectors in a Cartesian space,
#. length (size along row of points) in km,
#. width (size along column of points) in km,
#. area in square km.
"""
points, along_azimuth, updip, diag = self.triangulate()
top = along_azimuth[:-1]
left = updip[:, :-1]
tl_area = geo_utils.triangle_area(top, left, diag)
top_length = numpy.sqrt(numpy.sum(top * top, axis=-1))
left_length = numpy.sqrt(numpy.sum(left * left, axis=-1))
bottom = along_azimuth[1:]
right = updip[:, 1:]
br_area = geo_utils.triangle_area(bottom, right, diag)
bottom_length = numpy.sqrt(numpy.sum(bottom * bottom, axis=-1))
right_length = numpy.sqrt(numpy.sum(right * right, axis=-1))
cell_area = tl_area + br_area
tl_center = (points[:-1, :-1] + points[:-1, 1:] + points[1:, :-1]) / 3
br_center = (points[:-1, 1:] + points[1:, :-1] + points[1:, 1:]) / 3
cell_center = ((tl_center * tl_area.reshape(tl_area.shape + (1, ))
+ br_center * br_area.reshape(br_area.shape + (1, )))
/ cell_area.reshape(cell_area.shape + (1, )))
cell_length = ((top_length * tl_area + bottom_length * br_area)
/ cell_area)
cell_width = ((left_length * tl_area + right_length * br_area)
/ cell_area)
def get_cell_dimensions(self):
    """
    Calculate centroid, width, length and area of each mesh cell.

    :returns:
        Tuple of four elements, each being 2d numpy array.
        Each array has both dimensions less by one the dimensions
        of the mesh, since they represent cells, not vertices.
        Arrays contain the following cell information:

        #. centroids, 3d vectors in a Cartesian space,
        #. length (size along row of points) in km,
        #. width (size along column of points) in km,
        #. area in square km.
    """
    # each quadrilateral cell is split by its diagonal into a
    # top-left and a bottom-right triangle; cell properties are the
    # area-weighted combination of the two triangles
    points, along_azimuth, updip, diag = self.triangulate()
    # top-left triangle: top edge, left edge and the diagonal
    top = along_azimuth[:-1]
    left = updip[:, :-1]
    tl_area = geo_utils.triangle_area(top, left, diag)
    top_length = numpy.sqrt(numpy.sum(top * top, axis=-1))
    left_length = numpy.sqrt(numpy.sum(left * left, axis=-1))
    # bottom-right triangle: bottom edge, right edge and the diagonal
    bottom = along_azimuth[1:]
    right = updip[:, 1:]
    br_area = geo_utils.triangle_area(bottom, right, diag)
    bottom_length = numpy.sqrt(numpy.sum(bottom * bottom, axis=-1))
    right_length = numpy.sqrt(numpy.sum(right * right, axis=-1))

    # cell area is simply the sum of the two triangle areas
    cell_area = tl_area + br_area
    # triangle centroids = mean of the three vertex vectors
    tl_center = (points[:-1, :-1] + points[:-1, 1:] + points[1:, :-1]) / 3
    br_center = (points[:-1, 1:] + points[1:, :-1] + points[1:, 1:]) / 3
    # cell centroid: triangle centroids weighted by triangle area
    # (areas reshaped to broadcast over the 3 Cartesian components)
    cell_center = ((tl_center * tl_area.reshape(tl_area.shape + (1, ))
                    + br_center * br_area.reshape(br_area.shape + (1, )))
                   / cell_area.reshape(cell_area.shape + (1, )))
    # length/width: opposite edge lengths averaged with area weights
    cell_length = ((top_length * tl_area + bottom_length * br_area)
                   / cell_area)
    cell_width = ((left_length * tl_area + right_length * br_area)
                  / cell_area)
    return cell_center, cell_length, cell_width, cell_area
"def",
"get_cell_dimensions",
"(",
"self",
")",
":",
"points",
",",
"along_azimuth",
",",
"updip",
",",
"diag",
"=",
"self",
".",
"triangulate",
"(",
")",
"top",
"=",
"along_azimuth",
"[",
":",
"-",
"1",
"]",
"left",
"=",
"updip",
"[",
":",
",",
":",
"-",
"1",
"]",
"tl_area",
"=",
"geo_utils",
".",
"triangle_area",
"(",
"top",
",",
"left",
",",
"diag",
")",
"top_length",
"=",
"numpy",
".",
"sqrt",
"(",
"numpy",
".",
"sum",
"(",
"top",
"*",
"top",
",",
"axis",
"=",
"-",
"1",
")",
")",
"left_length",
"=",
"numpy",
".",
"sqrt",
"(",
"numpy",
".",
"sum",
"(",
"left",
"*",
"left",
",",
"axis",
"=",
"-",
"1",
")",
")",
"bottom",
"=",
"along_azimuth",
"[",
"1",
":",
"]",
"right",
"=",
"updip",
"[",
":",
",",
"1",
":",
"]",
"br_area",
"=",
"geo_utils",
".",
"triangle_area",
"(",
"bottom",
",",
"right",
",",
"diag",
")",
"bottom_length",
"=",
"numpy",
".",
"sqrt",
"(",
"numpy",
".",
"sum",
"(",
"bottom",
"*",
"bottom",
",",
"axis",
"=",
"-",
"1",
")",
")",
"right_length",
"=",
"numpy",
".",
"sqrt",
"(",
"numpy",
".",
"sum",
"(",
"right",
"*",
"right",
",",
"axis",
"=",
"-",
"1",
")",
")",
"cell_area",
"=",
"tl_area",
"+",
"br_area",
"tl_center",
"=",
"(",
"points",
"[",
":",
"-",
"1",
",",
":",
"-",
"1",
"]",
"+",
"points",
"[",
":",
"-",
"1",
",",
"1",
":",
"]",
"+",
"points",
"[",
"1",
":",
",",
":",
"-",
"1",
"]",
")",
"/",
"3",
"br_center",
"=",
"(",
"points",
"[",
":",
"-",
"1",
",",
"1",
":",
"]",
"+",
"points",
"[",
"1",
":",
",",
":",
"-",
"1",
"]",
"+",
"points",
"[",
"1",
":",
",",
"1",
":",
"]",
")",
"/",
"3",
"cell_center",
"=",
"(",
"(",
"tl_center",
"*",
"tl_area",
".",
"reshape",
"(",
"tl_area",
".",
"shape",
"+",
"(",
"1",
",",
")",
")",
"+",
"br_center",
"*",
"br_area",
".",
"reshape",
"(",
"br_area",
".",
"shape",
"+",
"(",
"1",
",",
")",
")",
")",
"/",
"cell_area",
".",
"reshape",
"(",
"cell_area",
".",
"shape",
"+",
"(",
"1",
",",
")",
")",
")",
"cell_length",
"=",
"(",
"(",
"top_length",
"*",
"tl_area",
"+",
"bottom_length",
"*",
"br_area",
")",
"/",
"cell_area",
")",
"cell_width",
"=",
"(",
"(",
"left_length",
"*",
"tl_area",
"+",
"right_length",
"*",
"br_area",
")",
"/",
"cell_area",
")",
"return",
"cell_center",
",",
"cell_length",
",",
"cell_width",
",",
"cell_area"
] | Calculate centroid, width, length and area of each mesh cell.
:returns:
Tuple of four elements, each being 2d numpy array.
Each array has both dimensions less by one the dimensions
of the mesh, since they represent cells, not vertices.
Arrays contain the following cell information:
#. centroids, 3d vectors in a Cartesian space,
#. length (size along row of points) in km,
#. width (size along column of points) in km,
#. area in square km. | [
"Calculate",
"centroid",
"width",
"length",
"and",
"area",
"of",
"each",
"mesh",
"cell",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/mesh.py#L704-L746 | train | 233,070 |
gem/oq-engine | openquake/hazardlib/geo/mesh.py | RectangularMesh.triangulate | def triangulate(self):
"""
Convert mesh points to vectors in Cartesian space.
:returns:
Tuple of four elements, each being 2d numpy array of 3d vectors
(the same structure and shape as the mesh itself). Those arrays
are:
#. points vectors,
#. vectors directed from each point (excluding the last column)
to the next one in a same row →,
#. vectors directed from each point (excluding the first row)
to the previous one in a same column ↑,
#. vectors pointing from a bottom left point of each mesh cell
to top right one ↗.
So the last three arrays of vectors allow to construct triangles
covering the whole mesh.
"""
points = geo_utils.spherical_to_cartesian(self.lons, self.lats,
self.depths)
# triangulate the mesh by defining vectors of triangles edges:
# →
along_azimuth = points[:, 1:] - points[:, :-1]
# ↑
updip = points[:-1] - points[1:]
# ↗
diag = points[:-1, 1:] - points[1:, :-1]
def triangulate(self):
    """
    Convert mesh points to vectors in Cartesian space.

    :returns:
        Tuple of four elements, each being 2d numpy array of 3d vectors
        (the same structure and shape as the mesh itself). Those arrays
        are:

        #. points vectors,
        #. vectors directed from each point (excluding the last column)
           to the next one in a same row →,
        #. vectors directed from each point (excluding the first row)
           to the previous one in a same column ↑,
        #. vectors pointing from a bottom left point of each mesh cell
           to top right one ↗.

        So the last three arrays of vectors allow to construct triangles
        covering the whole mesh.
    """
    xyz = geo_utils.spherical_to_cartesian(self.lons, self.lats,
                                           self.depths)
    # edge vectors of the triangles tiling the mesh:
    row_vec = xyz[:, 1:] - xyz[:, :-1]       # → along each row
    col_vec = xyz[:-1] - xyz[1:]             # ↑ up each column
    diag_vec = xyz[:-1, 1:] - xyz[1:, :-1]   # ↗ across each cell
    return xyz, row_vec, col_vec, diag_vec
"def",
"triangulate",
"(",
"self",
")",
":",
"points",
"=",
"geo_utils",
".",
"spherical_to_cartesian",
"(",
"self",
".",
"lons",
",",
"self",
".",
"lats",
",",
"self",
".",
"depths",
")",
"# triangulate the mesh by defining vectors of triangles edges:",
"# →",
"along_azimuth",
"=",
"points",
"[",
":",
",",
"1",
":",
"]",
"-",
"points",
"[",
":",
",",
":",
"-",
"1",
"]",
"# ↑",
"updip",
"=",
"points",
"[",
":",
"-",
"1",
"]",
"-",
"points",
"[",
"1",
":",
"]",
"# ↗",
"diag",
"=",
"points",
"[",
":",
"-",
"1",
",",
"1",
":",
"]",
"-",
"points",
"[",
"1",
":",
",",
":",
"-",
"1",
"]",
"return",
"points",
",",
"along_azimuth",
",",
"updip",
",",
"diag"
] | Convert mesh points to vectors in Cartesian space.
:returns:
Tuple of four elements, each being 2d numpy array of 3d vectors
(the same structure and shape as the mesh itself). Those arrays
are:
#. points vectors,
#. vectors directed from each point (excluding the last column)
to the next one in a same row →,
#. vectors directed from each point (excluding the first row)
to the previous one in a same column ↑,
#. vectors pointing from a bottom left point of each mesh cell
to top right one ↗.
So the last three arrays of vectors allow to construct triangles
covering the whole mesh. | [
"Convert",
"mesh",
"points",
"to",
"vectors",
"in",
"Cartesian",
"space",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/mesh.py#L748-L778 | train | 233,071 |
def smooth_data(self, data, config, is_3d=False):
    '''
    Applies the smoothing kernel to the data

    :param np.ndarray data:
        Raw earthquake count in the form [Longitude, Latitude, Depth,
        Count]
    :param dict config:
        Configuration parameters must contain:
        * BandWidth: The bandwidth of the kernel (in km) (float)
        * Length_Limit: Maximum number of standard deviations

    :returns:
        * smoothed_value: np.ndarray vector of smoothed values
        * Total (summed) rate of the original values
        * Total (summed) rate of the smoothed values
    '''
    # kernel support is truncated at Length_Limit bandwidths
    cutoff = config['Length_Limit'] * config['BandWidth']
    bw_sq = config['BandWidth'] ** 2.
    n_events = len(data)
    result = np.zeros(n_events, dtype=float)
    for pos in range(n_events):
        # epicentral distance from event `pos` to every event
        distance = haversine(data[:, 0], data[:, 1],
                             data[pos, 0], data[pos, 1])
        if is_3d:
            # fold the hypocentral depth difference into the distance
            distance = np.sqrt(distance.flatten() ** 2.0 +
                               (data[:, 2] - data[pos, 2]) ** 2.0)
        near = np.where(distance <= cutoff)[0]
        # Gaussian weights over the neighbours within the cutoff
        weights = (np.exp(-(distance[near] ** 2.0) / bw_sq)).flatten()
        # weighted mean of the neighbouring counts
        result[pos] = np.sum(weights * data[near, 3]) / np.sum(weights)
    return result, np.sum(data[:, -1]), np.sum(result)
'''
Applies the smoothing kernel to the data
:param np.ndarray data:
Raw earthquake count in the form [Longitude, Latitude, Depth,
Count]
:param dict config:
Configuration parameters must contain:
* BandWidth: The bandwidth of the kernel (in km) (float)
* Length_Limit: Maximum number of standard deviations
:returns:
* smoothed_value: np.ndarray vector of smoothed values
* Total (summed) rate of the original values
* Total (summed) rate of the smoothed values
'''
max_dist = config['Length_Limit'] * config['BandWidth']
smoothed_value = np.zeros(len(data), dtype=float)
for iloc in range(0, len(data)):
dist_val = haversine(data[:, 0], data[:, 1],
data[iloc, 0], data[iloc, 1])
if is_3d:
dist_val = np.sqrt(dist_val.flatten() ** 2.0 +
(data[:, 2] - data[iloc, 2]) ** 2.0)
id0 = np.where(dist_val <= max_dist)[0]
w_val = (np.exp(-(dist_val[id0] ** 2.0) /
(config['BandWidth'] ** 2.))).flatten()
smoothed_value[iloc] = np.sum(w_val * data[id0, 3]) / np.sum(w_val)
return smoothed_value, np.sum(data[:, -1]), np.sum(smoothed_value) | [
"def",
"smooth_data",
"(",
"self",
",",
"data",
",",
"config",
",",
"is_3d",
"=",
"False",
")",
":",
"max_dist",
"=",
"config",
"[",
"'Length_Limit'",
"]",
"*",
"config",
"[",
"'BandWidth'",
"]",
"smoothed_value",
"=",
"np",
".",
"zeros",
"(",
"len",
"(",
"data",
")",
",",
"dtype",
"=",
"float",
")",
"for",
"iloc",
"in",
"range",
"(",
"0",
",",
"len",
"(",
"data",
")",
")",
":",
"dist_val",
"=",
"haversine",
"(",
"data",
"[",
":",
",",
"0",
"]",
",",
"data",
"[",
":",
",",
"1",
"]",
",",
"data",
"[",
"iloc",
",",
"0",
"]",
",",
"data",
"[",
"iloc",
",",
"1",
"]",
")",
"if",
"is_3d",
":",
"dist_val",
"=",
"np",
".",
"sqrt",
"(",
"dist_val",
".",
"flatten",
"(",
")",
"**",
"2.0",
"+",
"(",
"data",
"[",
":",
",",
"2",
"]",
"-",
"data",
"[",
"iloc",
",",
"2",
"]",
")",
"**",
"2.0",
")",
"id0",
"=",
"np",
".",
"where",
"(",
"dist_val",
"<=",
"max_dist",
")",
"[",
"0",
"]",
"w_val",
"=",
"(",
"np",
".",
"exp",
"(",
"-",
"(",
"dist_val",
"[",
"id0",
"]",
"**",
"2.0",
")",
"/",
"(",
"config",
"[",
"'BandWidth'",
"]",
"**",
"2.",
")",
")",
")",
".",
"flatten",
"(",
")",
"smoothed_value",
"[",
"iloc",
"]",
"=",
"np",
".",
"sum",
"(",
"w_val",
"*",
"data",
"[",
"id0",
",",
"3",
"]",
")",
"/",
"np",
".",
"sum",
"(",
"w_val",
")",
"return",
"smoothed_value",
",",
"np",
".",
"sum",
"(",
"data",
"[",
":",
",",
"-",
"1",
"]",
")",
",",
"np",
".",
"sum",
"(",
"smoothed_value",
")"
] | Applies the smoothing kernel to the data
:param np.ndarray data:
Raw earthquake count in the form [Longitude, Latitude, Depth,
Count]
:param dict config:
Configuration parameters must contain:
* BandWidth: The bandwidth of the kernel (in km) (float)
* Length_Limit: Maximum number of standard deviations
:returns:
* smoothed_value: np.ndarray vector of smoothed values
* Total (summed) rate of the original values
* Total (summed) rate of the smoothed values | [
"Applies",
"the",
"smoothing",
"kernel",
"to",
"the",
"data"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/smoothing/kernels/isotropic_gaussian.py#L69-L99 | train | 233,072 |
gem/oq-engine | openquake/commands/purge.py | purge_one | def purge_one(calc_id, user):
"""
Remove one calculation ID from the database and remove its datastore
"""
filename = os.path.join(datadir, 'calc_%s.hdf5' % calc_id)
err = dbcmd('del_calc', calc_id, user)
if err:
print(err)
elif os.path.exists(filename): # not removed yet
os.remove(filename)
print('Removed %s' % filename) | python | def purge_one(calc_id, user):
"""
Remove one calculation ID from the database and remove its datastore
"""
filename = os.path.join(datadir, 'calc_%s.hdf5' % calc_id)
err = dbcmd('del_calc', calc_id, user)
if err:
print(err)
elif os.path.exists(filename): # not removed yet
os.remove(filename)
print('Removed %s' % filename) | [
"def",
"purge_one",
"(",
"calc_id",
",",
"user",
")",
":",
"filename",
"=",
"os",
".",
"path",
".",
"join",
"(",
"datadir",
",",
"'calc_%s.hdf5'",
"%",
"calc_id",
")",
"err",
"=",
"dbcmd",
"(",
"'del_calc'",
",",
"calc_id",
",",
"user",
")",
"if",
"err",
":",
"print",
"(",
"err",
")",
"elif",
"os",
".",
"path",
".",
"exists",
"(",
"filename",
")",
":",
"# not removed yet",
"os",
".",
"remove",
"(",
"filename",
")",
"print",
"(",
"'Removed %s'",
"%",
"filename",
")"
] | Remove one calculation ID from the database and remove its datastore | [
"Remove",
"one",
"calculation",
"ID",
"from",
"the",
"database",
"and",
"remove",
"its",
"datastore"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commands/purge.py#L28-L38 | train | 233,073 |
gem/oq-engine | openquake/commands/purge.py | purge_all | def purge_all(user=None, fast=False):
"""
Remove all calculations of the given user
"""
user = user or getpass.getuser()
if os.path.exists(datadir):
if fast:
shutil.rmtree(datadir)
print('Removed %s' % datadir)
else:
for fname in os.listdir(datadir):
mo = re.match('calc_(\d+)\.hdf5', fname)
if mo is not None:
calc_id = int(mo.group(1))
purge_one(calc_id, user) | python | def purge_all(user=None, fast=False):
"""
Remove all calculations of the given user
"""
user = user or getpass.getuser()
if os.path.exists(datadir):
if fast:
shutil.rmtree(datadir)
print('Removed %s' % datadir)
else:
for fname in os.listdir(datadir):
mo = re.match('calc_(\d+)\.hdf5', fname)
if mo is not None:
calc_id = int(mo.group(1))
purge_one(calc_id, user) | [
"def",
"purge_all",
"(",
"user",
"=",
"None",
",",
"fast",
"=",
"False",
")",
":",
"user",
"=",
"user",
"or",
"getpass",
".",
"getuser",
"(",
")",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"datadir",
")",
":",
"if",
"fast",
":",
"shutil",
".",
"rmtree",
"(",
"datadir",
")",
"print",
"(",
"'Removed %s'",
"%",
"datadir",
")",
"else",
":",
"for",
"fname",
"in",
"os",
".",
"listdir",
"(",
"datadir",
")",
":",
"mo",
"=",
"re",
".",
"match",
"(",
"'calc_(\\d+)\\.hdf5'",
",",
"fname",
")",
"if",
"mo",
"is",
"not",
"None",
":",
"calc_id",
"=",
"int",
"(",
"mo",
".",
"group",
"(",
"1",
")",
")",
"purge_one",
"(",
"calc_id",
",",
"user",
")"
] | Remove all calculations of the given user | [
"Remove",
"all",
"calculations",
"of",
"the",
"given",
"user"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commands/purge.py#L42-L56 | train | 233,074 |
gem/oq-engine | openquake/commands/purge.py | purge | def purge(calc_id):
"""
Remove the given calculation. If you want to remove all calculations,
use oq reset.
"""
if calc_id < 0:
try:
calc_id = datastore.get_calc_ids(datadir)[calc_id]
except IndexError:
print('Calculation %d not found' % calc_id)
return
purge_one(calc_id, getpass.getuser()) | python | def purge(calc_id):
"""
Remove the given calculation. If you want to remove all calculations,
use oq reset.
"""
if calc_id < 0:
try:
calc_id = datastore.get_calc_ids(datadir)[calc_id]
except IndexError:
print('Calculation %d not found' % calc_id)
return
purge_one(calc_id, getpass.getuser()) | [
"def",
"purge",
"(",
"calc_id",
")",
":",
"if",
"calc_id",
"<",
"0",
":",
"try",
":",
"calc_id",
"=",
"datastore",
".",
"get_calc_ids",
"(",
"datadir",
")",
"[",
"calc_id",
"]",
"except",
"IndexError",
":",
"print",
"(",
"'Calculation %d not found'",
"%",
"calc_id",
")",
"return",
"purge_one",
"(",
"calc_id",
",",
"getpass",
".",
"getuser",
"(",
")",
")"
] | Remove the given calculation. If you want to remove all calculations,
use oq reset. | [
"Remove",
"the",
"given",
"calculation",
".",
"If",
"you",
"want",
"to",
"remove",
"all",
"calculations",
"use",
"oq",
"reset",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commands/purge.py#L60-L71 | train | 233,075 |
gem/oq-engine | openquake/hmtk/plotting/patch.py | PolygonPatch | def PolygonPatch(polygon, **kwargs):
"""Constructs a matplotlib patch from a geometric object
The `polygon` may be a Shapely or GeoJSON-like object possibly with holes.
The `kwargs` are those supported by the matplotlib.patches.Polygon class
constructor. Returns an instance of matplotlib.patches.PathPatch.
Example (using Shapely Point and a matplotlib axes):
>> b = Point(0, 0).buffer(1.0)
>> patch = PolygonPatch(b, fc='blue', ec='blue', alpha=0.5)
>> axis.add_patch(patch)
"""
def coding(ob):
# The codes will be all "LINETO" commands, except for "MOVETO"s at the
# beginning of each subpath
n = len(getattr(ob, 'coords', None) or ob)
vals = ones(n, dtype=Path.code_type) * Path.LINETO
vals[0] = Path.MOVETO
return vals
if hasattr(polygon, 'geom_type'): # Shapely
ptype = polygon.geom_type
if ptype == 'Polygon':
polygon = [Polygon(polygon)]
elif ptype == 'MultiPolygon':
polygon = [Polygon(p) for p in polygon]
else:
raise ValueError(
"A polygon or multi-polygon representation is required")
else: # GeoJSON
polygon = getattr(polygon, '__geo_interface__', polygon)
ptype = polygon["type"]
if ptype == 'Polygon':
polygon = [Polygon(polygon)]
elif ptype == 'MultiPolygon':
polygon = [Polygon(p) for p in polygon['coordinates']]
else:
raise ValueError(
"A polygon or multi-polygon representation is required")
vertices = concatenate([
concatenate([asarray(t.exterior)[:, :2]] +
[asarray(r)[:, :2] for r in t.interiors])
for t in polygon])
codes = concatenate([
concatenate([coding(t.exterior)] +
[coding(r) for r in t.interiors]) for t in polygon])
return PathPatch(Path(vertices, codes), **kwargs) | python | def PolygonPatch(polygon, **kwargs):
"""Constructs a matplotlib patch from a geometric object
The `polygon` may be a Shapely or GeoJSON-like object possibly with holes.
The `kwargs` are those supported by the matplotlib.patches.Polygon class
constructor. Returns an instance of matplotlib.patches.PathPatch.
Example (using Shapely Point and a matplotlib axes):
>> b = Point(0, 0).buffer(1.0)
>> patch = PolygonPatch(b, fc='blue', ec='blue', alpha=0.5)
>> axis.add_patch(patch)
"""
def coding(ob):
# The codes will be all "LINETO" commands, except for "MOVETO"s at the
# beginning of each subpath
n = len(getattr(ob, 'coords', None) or ob)
vals = ones(n, dtype=Path.code_type) * Path.LINETO
vals[0] = Path.MOVETO
return vals
if hasattr(polygon, 'geom_type'): # Shapely
ptype = polygon.geom_type
if ptype == 'Polygon':
polygon = [Polygon(polygon)]
elif ptype == 'MultiPolygon':
polygon = [Polygon(p) for p in polygon]
else:
raise ValueError(
"A polygon or multi-polygon representation is required")
else: # GeoJSON
polygon = getattr(polygon, '__geo_interface__', polygon)
ptype = polygon["type"]
if ptype == 'Polygon':
polygon = [Polygon(polygon)]
elif ptype == 'MultiPolygon':
polygon = [Polygon(p) for p in polygon['coordinates']]
else:
raise ValueError(
"A polygon or multi-polygon representation is required")
vertices = concatenate([
concatenate([asarray(t.exterior)[:, :2]] +
[asarray(r)[:, :2] for r in t.interiors])
for t in polygon])
codes = concatenate([
concatenate([coding(t.exterior)] +
[coding(r) for r in t.interiors]) for t in polygon])
return PathPatch(Path(vertices, codes), **kwargs) | [
"def",
"PolygonPatch",
"(",
"polygon",
",",
"*",
"*",
"kwargs",
")",
":",
"def",
"coding",
"(",
"ob",
")",
":",
"# The codes will be all \"LINETO\" commands, except for \"MOVETO\"s at the",
"# beginning of each subpath",
"n",
"=",
"len",
"(",
"getattr",
"(",
"ob",
",",
"'coords'",
",",
"None",
")",
"or",
"ob",
")",
"vals",
"=",
"ones",
"(",
"n",
",",
"dtype",
"=",
"Path",
".",
"code_type",
")",
"*",
"Path",
".",
"LINETO",
"vals",
"[",
"0",
"]",
"=",
"Path",
".",
"MOVETO",
"return",
"vals",
"if",
"hasattr",
"(",
"polygon",
",",
"'geom_type'",
")",
":",
"# Shapely",
"ptype",
"=",
"polygon",
".",
"geom_type",
"if",
"ptype",
"==",
"'Polygon'",
":",
"polygon",
"=",
"[",
"Polygon",
"(",
"polygon",
")",
"]",
"elif",
"ptype",
"==",
"'MultiPolygon'",
":",
"polygon",
"=",
"[",
"Polygon",
"(",
"p",
")",
"for",
"p",
"in",
"polygon",
"]",
"else",
":",
"raise",
"ValueError",
"(",
"\"A polygon or multi-polygon representation is required\"",
")",
"else",
":",
"# GeoJSON",
"polygon",
"=",
"getattr",
"(",
"polygon",
",",
"'__geo_interface__'",
",",
"polygon",
")",
"ptype",
"=",
"polygon",
"[",
"\"type\"",
"]",
"if",
"ptype",
"==",
"'Polygon'",
":",
"polygon",
"=",
"[",
"Polygon",
"(",
"polygon",
")",
"]",
"elif",
"ptype",
"==",
"'MultiPolygon'",
":",
"polygon",
"=",
"[",
"Polygon",
"(",
"p",
")",
"for",
"p",
"in",
"polygon",
"[",
"'coordinates'",
"]",
"]",
"else",
":",
"raise",
"ValueError",
"(",
"\"A polygon or multi-polygon representation is required\"",
")",
"vertices",
"=",
"concatenate",
"(",
"[",
"concatenate",
"(",
"[",
"asarray",
"(",
"t",
".",
"exterior",
")",
"[",
":",
",",
":",
"2",
"]",
"]",
"+",
"[",
"asarray",
"(",
"r",
")",
"[",
":",
",",
":",
"2",
"]",
"for",
"r",
"in",
"t",
".",
"interiors",
"]",
")",
"for",
"t",
"in",
"polygon",
"]",
")",
"codes",
"=",
"concatenate",
"(",
"[",
"concatenate",
"(",
"[",
"coding",
"(",
"t",
".",
"exterior",
")",
"]",
"+",
"[",
"coding",
"(",
"r",
")",
"for",
"r",
"in",
"t",
".",
"interiors",
"]",
")",
"for",
"t",
"in",
"polygon",
"]",
")",
"return",
"PathPatch",
"(",
"Path",
"(",
"vertices",
",",
"codes",
")",
",",
"*",
"*",
"kwargs",
")"
] | Constructs a matplotlib patch from a geometric object
The `polygon` may be a Shapely or GeoJSON-like object possibly with holes.
The `kwargs` are those supported by the matplotlib.patches.Polygon class
constructor. Returns an instance of matplotlib.patches.PathPatch.
Example (using Shapely Point and a matplotlib axes):
>> b = Point(0, 0).buffer(1.0)
>> patch = PolygonPatch(b, fc='blue', ec='blue', alpha=0.5)
>> axis.add_patch(patch) | [
"Constructs",
"a",
"matplotlib",
"patch",
"from",
"a",
"geometric",
"object"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/plotting/patch.py#L43-L93 | train | 233,076 |
gem/oq-engine | openquake/hazardlib/gsim/kotha_2019.py | KothaEtAl2019.retreive_sigma_mu_data | def retreive_sigma_mu_data(self):
"""
For the general form of the GMPE this retrieves the sigma mu
values from the hdf5 file using the "general" model, i.e. sigma mu
factors that are independent of the choice of region or depth
"""
fle = h5py.File(os.path.join(BASE_PATH,
"KothaEtAl2019_SigmaMu_Fixed.hdf5"), "r")
self.mags = fle["M"][:]
self.dists = fle["R"][:]
self.periods = fle["T"][:]
self.pga = fle["PGA"][:]
self.pgv = fle["PGV"][:]
self.s_a = fle["SA"][:]
fle.close() | python | def retreive_sigma_mu_data(self):
"""
For the general form of the GMPE this retrieves the sigma mu
values from the hdf5 file using the "general" model, i.e. sigma mu
factors that are independent of the choice of region or depth
"""
fle = h5py.File(os.path.join(BASE_PATH,
"KothaEtAl2019_SigmaMu_Fixed.hdf5"), "r")
self.mags = fle["M"][:]
self.dists = fle["R"][:]
self.periods = fle["T"][:]
self.pga = fle["PGA"][:]
self.pgv = fle["PGV"][:]
self.s_a = fle["SA"][:]
fle.close() | [
"def",
"retreive_sigma_mu_data",
"(",
"self",
")",
":",
"fle",
"=",
"h5py",
".",
"File",
"(",
"os",
".",
"path",
".",
"join",
"(",
"BASE_PATH",
",",
"\"KothaEtAl2019_SigmaMu_Fixed.hdf5\"",
")",
",",
"\"r\"",
")",
"self",
".",
"mags",
"=",
"fle",
"[",
"\"M\"",
"]",
"[",
":",
"]",
"self",
".",
"dists",
"=",
"fle",
"[",
"\"R\"",
"]",
"[",
":",
"]",
"self",
".",
"periods",
"=",
"fle",
"[",
"\"T\"",
"]",
"[",
":",
"]",
"self",
".",
"pga",
"=",
"fle",
"[",
"\"PGA\"",
"]",
"[",
":",
"]",
"self",
".",
"pgv",
"=",
"fle",
"[",
"\"PGV\"",
"]",
"[",
":",
"]",
"self",
".",
"s_a",
"=",
"fle",
"[",
"\"SA\"",
"]",
"[",
":",
"]",
"fle",
".",
"close",
"(",
")"
] | For the general form of the GMPE this retrieves the sigma mu
values from the hdf5 file using the "general" model, i.e. sigma mu
factors that are independent of the choice of region or depth | [
"For",
"the",
"general",
"form",
"of",
"the",
"GMPE",
"this",
"retrieves",
"the",
"sigma",
"mu",
"values",
"from",
"the",
"hdf5",
"file",
"using",
"the",
"general",
"model",
"i",
".",
"e",
".",
"sigma",
"mu",
"factors",
"that",
"are",
"independent",
"of",
"the",
"choice",
"of",
"region",
"or",
"depth"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/kotha_2019.py#L129-L143 | train | 233,077 |
gem/oq-engine | openquake/hazardlib/gsim/kotha_2019.py | KothaEtAl2019.get_magnitude_scaling | def get_magnitude_scaling(self, C, mag):
"""
Returns the magnitude scaling term
"""
d_m = mag - self.CONSTANTS["Mh"]
if mag < self.CONSTANTS["Mh"]:
return C["e1"] + C["b1"] * d_m + C["b2"] * (d_m ** 2.0)
else:
return C["e1"] + C["b3"] * d_m | python | def get_magnitude_scaling(self, C, mag):
"""
Returns the magnitude scaling term
"""
d_m = mag - self.CONSTANTS["Mh"]
if mag < self.CONSTANTS["Mh"]:
return C["e1"] + C["b1"] * d_m + C["b2"] * (d_m ** 2.0)
else:
return C["e1"] + C["b3"] * d_m | [
"def",
"get_magnitude_scaling",
"(",
"self",
",",
"C",
",",
"mag",
")",
":",
"d_m",
"=",
"mag",
"-",
"self",
".",
"CONSTANTS",
"[",
"\"Mh\"",
"]",
"if",
"mag",
"<",
"self",
".",
"CONSTANTS",
"[",
"\"Mh\"",
"]",
":",
"return",
"C",
"[",
"\"e1\"",
"]",
"+",
"C",
"[",
"\"b1\"",
"]",
"*",
"d_m",
"+",
"C",
"[",
"\"b2\"",
"]",
"*",
"(",
"d_m",
"**",
"2.0",
")",
"else",
":",
"return",
"C",
"[",
"\"e1\"",
"]",
"+",
"C",
"[",
"\"b3\"",
"]",
"*",
"d_m"
] | Returns the magnitude scaling term | [
"Returns",
"the",
"magnitude",
"scaling",
"term"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/kotha_2019.py#L174-L182 | train | 233,078 |
gem/oq-engine | openquake/hazardlib/gsim/kotha_2019.py | KothaEtAl2019.get_distance_term | def get_distance_term(self, C, rup, rjb, imt):
"""
Returns the distance attenuation factor
"""
h = self._get_h(C, rup.hypo_depth)
rval = np.sqrt(rjb ** 2. + h ** 2.)
c3 = self.get_distance_coefficients(C, imt)
f_r = (C["c1"] + C["c2"] * (rup.mag - self.CONSTANTS["Mref"])) *\
np.log(rval / self.CONSTANTS["Rref"]) +\
c3 * (rval - self.CONSTANTS["Rref"])
return f_r | python | def get_distance_term(self, C, rup, rjb, imt):
"""
Returns the distance attenuation factor
"""
h = self._get_h(C, rup.hypo_depth)
rval = np.sqrt(rjb ** 2. + h ** 2.)
c3 = self.get_distance_coefficients(C, imt)
f_r = (C["c1"] + C["c2"] * (rup.mag - self.CONSTANTS["Mref"])) *\
np.log(rval / self.CONSTANTS["Rref"]) +\
c3 * (rval - self.CONSTANTS["Rref"])
return f_r | [
"def",
"get_distance_term",
"(",
"self",
",",
"C",
",",
"rup",
",",
"rjb",
",",
"imt",
")",
":",
"h",
"=",
"self",
".",
"_get_h",
"(",
"C",
",",
"rup",
".",
"hypo_depth",
")",
"rval",
"=",
"np",
".",
"sqrt",
"(",
"rjb",
"**",
"2.",
"+",
"h",
"**",
"2.",
")",
"c3",
"=",
"self",
".",
"get_distance_coefficients",
"(",
"C",
",",
"imt",
")",
"f_r",
"=",
"(",
"C",
"[",
"\"c1\"",
"]",
"+",
"C",
"[",
"\"c2\"",
"]",
"*",
"(",
"rup",
".",
"mag",
"-",
"self",
".",
"CONSTANTS",
"[",
"\"Mref\"",
"]",
")",
")",
"*",
"np",
".",
"log",
"(",
"rval",
"/",
"self",
".",
"CONSTANTS",
"[",
"\"Rref\"",
"]",
")",
"+",
"c3",
"*",
"(",
"rval",
"-",
"self",
".",
"CONSTANTS",
"[",
"\"Rref\"",
"]",
")",
"return",
"f_r"
] | Returns the distance attenuation factor | [
"Returns",
"the",
"distance",
"attenuation",
"factor"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/kotha_2019.py#L184-L195 | train | 233,079 |
gem/oq-engine | openquake/hazardlib/gsim/kotha_2019.py | KothaEtAl2019.get_distance_coefficients | def get_distance_coefficients(self, C, imt):
"""
Returns the c3 term
"""
c3 = self.c3[imt]["c3"] if self.c3 else C["c3"]
return c3 | python | def get_distance_coefficients(self, C, imt):
"""
Returns the c3 term
"""
c3 = self.c3[imt]["c3"] if self.c3 else C["c3"]
return c3 | [
"def",
"get_distance_coefficients",
"(",
"self",
",",
"C",
",",
"imt",
")",
":",
"c3",
"=",
"self",
".",
"c3",
"[",
"imt",
"]",
"[",
"\"c3\"",
"]",
"if",
"self",
".",
"c3",
"else",
"C",
"[",
"\"c3\"",
"]",
"return",
"c3"
] | Returns the c3 term | [
"Returns",
"the",
"c3",
"term"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/kotha_2019.py#L208-L213 | train | 233,080 |
gem/oq-engine | openquake/hazardlib/gsim/kotha_2019.py | KothaEtAl2019.get_sigma_mu_adjustment | def get_sigma_mu_adjustment(self, C, imt, rup, dists):
"""
Returns the sigma mu adjustment factor
"""
if imt.name in "PGA PGV":
# PGA and PGV are 2D arrays of dimension [nmags, ndists]
sigma_mu = getattr(self, imt.name.lower())
if rup.mag <= self.mags[0]:
sigma_mu_m = sigma_mu[0, :]
elif rup.mag >= self.mags[-1]:
sigma_mu_m = sigma_mu[-1, :]
else:
intpl1 = interp1d(self.mags, sigma_mu, axis=0)
sigma_mu_m = intpl1(rup.mag)
# Linear interpolation with distance
intpl2 = interp1d(self.dists, sigma_mu_m, bounds_error=False,
fill_value=(sigma_mu_m[0], sigma_mu_m[-1]))
return intpl2(dists.rjb)
# In the case of SA the array is of dimension [nmags, ndists, nperiods]
# Get values for given magnitude
if rup.mag <= self.mags[0]:
sigma_mu_m = self.s_a[0, :, :]
elif rup.mag >= self.mags[-1]:
sigma_mu_m = self.s_a[-1, :, :]
else:
intpl1 = interp1d(self.mags, self.s_a, axis=0)
sigma_mu_m = intpl1(rup.mag)
# Get values for period - N.B. ln T, linear sigma mu interpolation
if imt.period <= self.periods[0]:
sigma_mu_t = sigma_mu_m[:, 0]
elif imt.period >= self.periods[-1]:
sigma_mu_t = sigma_mu_m[:, -1]
else:
intpl2 = interp1d(np.log(self.periods), sigma_mu_m, axis=1)
sigma_mu_t = intpl2(np.log(imt.period))
intpl3 = interp1d(self.dists, sigma_mu_t, bounds_error=False,
fill_value=(sigma_mu_t[0], sigma_mu_t[-1]))
return intpl3(dists.rjb) | python | def get_sigma_mu_adjustment(self, C, imt, rup, dists):
"""
Returns the sigma mu adjustment factor
"""
if imt.name in "PGA PGV":
# PGA and PGV are 2D arrays of dimension [nmags, ndists]
sigma_mu = getattr(self, imt.name.lower())
if rup.mag <= self.mags[0]:
sigma_mu_m = sigma_mu[0, :]
elif rup.mag >= self.mags[-1]:
sigma_mu_m = sigma_mu[-1, :]
else:
intpl1 = interp1d(self.mags, sigma_mu, axis=0)
sigma_mu_m = intpl1(rup.mag)
# Linear interpolation with distance
intpl2 = interp1d(self.dists, sigma_mu_m, bounds_error=False,
fill_value=(sigma_mu_m[0], sigma_mu_m[-1]))
return intpl2(dists.rjb)
# In the case of SA the array is of dimension [nmags, ndists, nperiods]
# Get values for given magnitude
if rup.mag <= self.mags[0]:
sigma_mu_m = self.s_a[0, :, :]
elif rup.mag >= self.mags[-1]:
sigma_mu_m = self.s_a[-1, :, :]
else:
intpl1 = interp1d(self.mags, self.s_a, axis=0)
sigma_mu_m = intpl1(rup.mag)
# Get values for period - N.B. ln T, linear sigma mu interpolation
if imt.period <= self.periods[0]:
sigma_mu_t = sigma_mu_m[:, 0]
elif imt.period >= self.periods[-1]:
sigma_mu_t = sigma_mu_m[:, -1]
else:
intpl2 = interp1d(np.log(self.periods), sigma_mu_m, axis=1)
sigma_mu_t = intpl2(np.log(imt.period))
intpl3 = interp1d(self.dists, sigma_mu_t, bounds_error=False,
fill_value=(sigma_mu_t[0], sigma_mu_t[-1]))
return intpl3(dists.rjb) | [
"def",
"get_sigma_mu_adjustment",
"(",
"self",
",",
"C",
",",
"imt",
",",
"rup",
",",
"dists",
")",
":",
"if",
"imt",
".",
"name",
"in",
"\"PGA PGV\"",
":",
"# PGA and PGV are 2D arrays of dimension [nmags, ndists]",
"sigma_mu",
"=",
"getattr",
"(",
"self",
",",
"imt",
".",
"name",
".",
"lower",
"(",
")",
")",
"if",
"rup",
".",
"mag",
"<=",
"self",
".",
"mags",
"[",
"0",
"]",
":",
"sigma_mu_m",
"=",
"sigma_mu",
"[",
"0",
",",
":",
"]",
"elif",
"rup",
".",
"mag",
">=",
"self",
".",
"mags",
"[",
"-",
"1",
"]",
":",
"sigma_mu_m",
"=",
"sigma_mu",
"[",
"-",
"1",
",",
":",
"]",
"else",
":",
"intpl1",
"=",
"interp1d",
"(",
"self",
".",
"mags",
",",
"sigma_mu",
",",
"axis",
"=",
"0",
")",
"sigma_mu_m",
"=",
"intpl1",
"(",
"rup",
".",
"mag",
")",
"# Linear interpolation with distance",
"intpl2",
"=",
"interp1d",
"(",
"self",
".",
"dists",
",",
"sigma_mu_m",
",",
"bounds_error",
"=",
"False",
",",
"fill_value",
"=",
"(",
"sigma_mu_m",
"[",
"0",
"]",
",",
"sigma_mu_m",
"[",
"-",
"1",
"]",
")",
")",
"return",
"intpl2",
"(",
"dists",
".",
"rjb",
")",
"# In the case of SA the array is of dimension [nmags, ndists, nperiods]",
"# Get values for given magnitude",
"if",
"rup",
".",
"mag",
"<=",
"self",
".",
"mags",
"[",
"0",
"]",
":",
"sigma_mu_m",
"=",
"self",
".",
"s_a",
"[",
"0",
",",
":",
",",
":",
"]",
"elif",
"rup",
".",
"mag",
">=",
"self",
".",
"mags",
"[",
"-",
"1",
"]",
":",
"sigma_mu_m",
"=",
"self",
".",
"s_a",
"[",
"-",
"1",
",",
":",
",",
":",
"]",
"else",
":",
"intpl1",
"=",
"interp1d",
"(",
"self",
".",
"mags",
",",
"self",
".",
"s_a",
",",
"axis",
"=",
"0",
")",
"sigma_mu_m",
"=",
"intpl1",
"(",
"rup",
".",
"mag",
")",
"# Get values for period - N.B. ln T, linear sigma mu interpolation",
"if",
"imt",
".",
"period",
"<=",
"self",
".",
"periods",
"[",
"0",
"]",
":",
"sigma_mu_t",
"=",
"sigma_mu_m",
"[",
":",
",",
"0",
"]",
"elif",
"imt",
".",
"period",
">=",
"self",
".",
"periods",
"[",
"-",
"1",
"]",
":",
"sigma_mu_t",
"=",
"sigma_mu_m",
"[",
":",
",",
"-",
"1",
"]",
"else",
":",
"intpl2",
"=",
"interp1d",
"(",
"np",
".",
"log",
"(",
"self",
".",
"periods",
")",
",",
"sigma_mu_m",
",",
"axis",
"=",
"1",
")",
"sigma_mu_t",
"=",
"intpl2",
"(",
"np",
".",
"log",
"(",
"imt",
".",
"period",
")",
")",
"intpl3",
"=",
"interp1d",
"(",
"self",
".",
"dists",
",",
"sigma_mu_t",
",",
"bounds_error",
"=",
"False",
",",
"fill_value",
"=",
"(",
"sigma_mu_t",
"[",
"0",
"]",
",",
"sigma_mu_t",
"[",
"-",
"1",
"]",
")",
")",
"return",
"intpl3",
"(",
"dists",
".",
"rjb",
")"
] | Returns the sigma mu adjustment factor | [
"Returns",
"the",
"sigma",
"mu",
"adjustment",
"factor"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/kotha_2019.py#L221-L258 | train | 233,081 |
gem/oq-engine | openquake/hazardlib/gsim/kotha_2019.py | KothaEtAl2019SERA.get_site_amplification | def get_site_amplification(self, C, sites):
"""
Returns the linear site amplification term depending on whether the
Vs30 is observed of inferred
"""
ampl = np.zeros(sites.vs30.shape)
# For observed vs30 sites
ampl[sites.vs30measured] = (C["d0_obs"] + C["d1_obs"] *
np.log(sites.vs30[sites.vs30measured]))
# For inferred Vs30 sites
idx = np.logical_not(sites.vs30measured)
ampl[idx] = (C["d0_inf"] + C["d1_inf"] * np.log(sites.vs30[idx]))
return ampl | python | def get_site_amplification(self, C, sites):
"""
Returns the linear site amplification term depending on whether the
Vs30 is observed of inferred
"""
ampl = np.zeros(sites.vs30.shape)
# For observed vs30 sites
ampl[sites.vs30measured] = (C["d0_obs"] + C["d1_obs"] *
np.log(sites.vs30[sites.vs30measured]))
# For inferred Vs30 sites
idx = np.logical_not(sites.vs30measured)
ampl[idx] = (C["d0_inf"] + C["d1_inf"] * np.log(sites.vs30[idx]))
return ampl | [
"def",
"get_site_amplification",
"(",
"self",
",",
"C",
",",
"sites",
")",
":",
"ampl",
"=",
"np",
".",
"zeros",
"(",
"sites",
".",
"vs30",
".",
"shape",
")",
"# For observed vs30 sites",
"ampl",
"[",
"sites",
".",
"vs30measured",
"]",
"=",
"(",
"C",
"[",
"\"d0_obs\"",
"]",
"+",
"C",
"[",
"\"d1_obs\"",
"]",
"*",
"np",
".",
"log",
"(",
"sites",
".",
"vs30",
"[",
"sites",
".",
"vs30measured",
"]",
")",
")",
"# For inferred Vs30 sites",
"idx",
"=",
"np",
".",
"logical_not",
"(",
"sites",
".",
"vs30measured",
")",
"ampl",
"[",
"idx",
"]",
"=",
"(",
"C",
"[",
"\"d0_inf\"",
"]",
"+",
"C",
"[",
"\"d1_inf\"",
"]",
"*",
"np",
".",
"log",
"(",
"sites",
".",
"vs30",
"[",
"idx",
"]",
")",
")",
"return",
"ampl"
] | Returns the linear site amplification term depending on whether the
Vs30 is observed of inferred | [
"Returns",
"the",
"linear",
"site",
"amplification",
"term",
"depending",
"on",
"whether",
"the",
"Vs30",
"is",
"observed",
"of",
"inferred"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/kotha_2019.py#L332-L344 | train | 233,082 |
gem/oq-engine | openquake/hazardlib/gsim/kotha_2019.py | KothaEtAl2019SERA.get_stddevs | def get_stddevs(self, C, stddev_shape, stddev_types, sites):
"""
Returns the standard deviations, with different site standard
deviation for inferred vs. observed vs30 sites.
"""
stddevs = []
tau = C["tau_event"]
sigma_s = np.zeros(sites.vs30measured.shape, dtype=float)
sigma_s[sites.vs30measured] += C["sigma_s_obs"]
sigma_s[np.logical_not(sites.vs30measured)] += C["sigma_s_inf"]
phi = np.sqrt(C["phi0"] ** 2.0 + sigma_s ** 2.)
for stddev_type in stddev_types:
assert stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES
if stddev_type == const.StdDev.TOTAL:
stddevs.append(np.sqrt(tau ** 2. + phi ** 2.) +
np.zeros(stddev_shape))
elif stddev_type == const.StdDev.INTRA_EVENT:
stddevs.append(phi + np.zeros(stddev_shape))
elif stddev_type == const.StdDev.INTER_EVENT:
stddevs.append(tau + np.zeros(stddev_shape))
return stddevs | python | def get_stddevs(self, C, stddev_shape, stddev_types, sites):
"""
Returns the standard deviations, with different site standard
deviation for inferred vs. observed vs30 sites.
"""
stddevs = []
tau = C["tau_event"]
sigma_s = np.zeros(sites.vs30measured.shape, dtype=float)
sigma_s[sites.vs30measured] += C["sigma_s_obs"]
sigma_s[np.logical_not(sites.vs30measured)] += C["sigma_s_inf"]
phi = np.sqrt(C["phi0"] ** 2.0 + sigma_s ** 2.)
for stddev_type in stddev_types:
assert stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES
if stddev_type == const.StdDev.TOTAL:
stddevs.append(np.sqrt(tau ** 2. + phi ** 2.) +
np.zeros(stddev_shape))
elif stddev_type == const.StdDev.INTRA_EVENT:
stddevs.append(phi + np.zeros(stddev_shape))
elif stddev_type == const.StdDev.INTER_EVENT:
stddevs.append(tau + np.zeros(stddev_shape))
return stddevs | [
"def",
"get_stddevs",
"(",
"self",
",",
"C",
",",
"stddev_shape",
",",
"stddev_types",
",",
"sites",
")",
":",
"stddevs",
"=",
"[",
"]",
"tau",
"=",
"C",
"[",
"\"tau_event\"",
"]",
"sigma_s",
"=",
"np",
".",
"zeros",
"(",
"sites",
".",
"vs30measured",
".",
"shape",
",",
"dtype",
"=",
"float",
")",
"sigma_s",
"[",
"sites",
".",
"vs30measured",
"]",
"+=",
"C",
"[",
"\"sigma_s_obs\"",
"]",
"sigma_s",
"[",
"np",
".",
"logical_not",
"(",
"sites",
".",
"vs30measured",
")",
"]",
"+=",
"C",
"[",
"\"sigma_s_inf\"",
"]",
"phi",
"=",
"np",
".",
"sqrt",
"(",
"C",
"[",
"\"phi0\"",
"]",
"**",
"2.0",
"+",
"sigma_s",
"**",
"2.",
")",
"for",
"stddev_type",
"in",
"stddev_types",
":",
"assert",
"stddev_type",
"in",
"self",
".",
"DEFINED_FOR_STANDARD_DEVIATION_TYPES",
"if",
"stddev_type",
"==",
"const",
".",
"StdDev",
".",
"TOTAL",
":",
"stddevs",
".",
"append",
"(",
"np",
".",
"sqrt",
"(",
"tau",
"**",
"2.",
"+",
"phi",
"**",
"2.",
")",
"+",
"np",
".",
"zeros",
"(",
"stddev_shape",
")",
")",
"elif",
"stddev_type",
"==",
"const",
".",
"StdDev",
".",
"INTRA_EVENT",
":",
"stddevs",
".",
"append",
"(",
"phi",
"+",
"np",
".",
"zeros",
"(",
"stddev_shape",
")",
")",
"elif",
"stddev_type",
"==",
"const",
".",
"StdDev",
".",
"INTER_EVENT",
":",
"stddevs",
".",
"append",
"(",
"tau",
"+",
"np",
".",
"zeros",
"(",
"stddev_shape",
")",
")",
"return",
"stddevs"
] | Returns the standard deviations, with different site standard
deviation for inferred vs. observed vs30 sites. | [
"Returns",
"the",
"standard",
"deviations",
"with",
"different",
"site",
"standard",
"deviation",
"for",
"inferred",
"vs",
".",
"observed",
"vs30",
"sites",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/kotha_2019.py#L346-L366 | train | 233,083 |
gem/oq-engine | openquake/hazardlib/geo/geodetic.py | geodetic_distance | def geodetic_distance(lons1, lats1, lons2, lats2, diameter=2*EARTH_RADIUS):
"""
Calculate the geodetic distance between two points or two collections
of points.
Parameters are coordinates in decimal degrees. They could be scalar
float numbers or numpy arrays, in which case they should "broadcast
together".
Implements http://williams.best.vwh.net/avform.htm#Dist
:returns:
Distance in km, floating point scalar or numpy array of such.
"""
lons1, lats1, lons2, lats2 = _prepare_coords(lons1, lats1, lons2, lats2)
distance = numpy.arcsin(numpy.sqrt(
numpy.sin((lats1 - lats2) / 2.0) ** 2.0
+ numpy.cos(lats1) * numpy.cos(lats2)
* numpy.sin((lons1 - lons2) / 2.0) ** 2.0
))
return diameter * distance | python | def geodetic_distance(lons1, lats1, lons2, lats2, diameter=2*EARTH_RADIUS):
"""
Calculate the geodetic distance between two points or two collections
of points.
Parameters are coordinates in decimal degrees. They could be scalar
float numbers or numpy arrays, in which case they should "broadcast
together".
Implements http://williams.best.vwh.net/avform.htm#Dist
:returns:
Distance in km, floating point scalar or numpy array of such.
"""
lons1, lats1, lons2, lats2 = _prepare_coords(lons1, lats1, lons2, lats2)
distance = numpy.arcsin(numpy.sqrt(
numpy.sin((lats1 - lats2) / 2.0) ** 2.0
+ numpy.cos(lats1) * numpy.cos(lats2)
* numpy.sin((lons1 - lons2) / 2.0) ** 2.0
))
return diameter * distance | [
"def",
"geodetic_distance",
"(",
"lons1",
",",
"lats1",
",",
"lons2",
",",
"lats2",
",",
"diameter",
"=",
"2",
"*",
"EARTH_RADIUS",
")",
":",
"lons1",
",",
"lats1",
",",
"lons2",
",",
"lats2",
"=",
"_prepare_coords",
"(",
"lons1",
",",
"lats1",
",",
"lons2",
",",
"lats2",
")",
"distance",
"=",
"numpy",
".",
"arcsin",
"(",
"numpy",
".",
"sqrt",
"(",
"numpy",
".",
"sin",
"(",
"(",
"lats1",
"-",
"lats2",
")",
"/",
"2.0",
")",
"**",
"2.0",
"+",
"numpy",
".",
"cos",
"(",
"lats1",
")",
"*",
"numpy",
".",
"cos",
"(",
"lats2",
")",
"*",
"numpy",
".",
"sin",
"(",
"(",
"lons1",
"-",
"lons2",
")",
"/",
"2.0",
")",
"**",
"2.0",
")",
")",
"return",
"diameter",
"*",
"distance"
] | Calculate the geodetic distance between two points or two collections
of points.
Parameters are coordinates in decimal degrees. They could be scalar
float numbers or numpy arrays, in which case they should "broadcast
together".
Implements http://williams.best.vwh.net/avform.htm#Dist
:returns:
Distance in km, floating point scalar or numpy array of such. | [
"Calculate",
"the",
"geodetic",
"distance",
"between",
"two",
"points",
"or",
"two",
"collections",
"of",
"points",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/geodetic.py#L34-L54 | train | 233,084 |
gem/oq-engine | openquake/hazardlib/geo/geodetic.py | azimuth | def azimuth(lons1, lats1, lons2, lats2):
"""
Calculate the azimuth between two points or two collections of points.
Parameters are the same as for :func:`geodetic_distance`.
Implements an "alternative formula" from
http://williams.best.vwh.net/avform.htm#Crs
:returns:
Azimuth as an angle between direction to north from first point and
direction to the second point measured clockwise in decimal degrees.
"""
lons1, lats1, lons2, lats2 = _prepare_coords(lons1, lats1, lons2, lats2)
cos_lat2 = numpy.cos(lats2)
true_course = numpy.degrees(numpy.arctan2(
numpy.sin(lons1 - lons2) * cos_lat2,
numpy.cos(lats1) * numpy.sin(lats2)
- numpy.sin(lats1) * cos_lat2 * numpy.cos(lons1 - lons2)
))
return (360 - true_course) % 360 | python | def azimuth(lons1, lats1, lons2, lats2):
"""
Calculate the azimuth between two points or two collections of points.
Parameters are the same as for :func:`geodetic_distance`.
Implements an "alternative formula" from
http://williams.best.vwh.net/avform.htm#Crs
:returns:
Azimuth as an angle between direction to north from first point and
direction to the second point measured clockwise in decimal degrees.
"""
lons1, lats1, lons2, lats2 = _prepare_coords(lons1, lats1, lons2, lats2)
cos_lat2 = numpy.cos(lats2)
true_course = numpy.degrees(numpy.arctan2(
numpy.sin(lons1 - lons2) * cos_lat2,
numpy.cos(lats1) * numpy.sin(lats2)
- numpy.sin(lats1) * cos_lat2 * numpy.cos(lons1 - lons2)
))
return (360 - true_course) % 360 | [
"def",
"azimuth",
"(",
"lons1",
",",
"lats1",
",",
"lons2",
",",
"lats2",
")",
":",
"lons1",
",",
"lats1",
",",
"lons2",
",",
"lats2",
"=",
"_prepare_coords",
"(",
"lons1",
",",
"lats1",
",",
"lons2",
",",
"lats2",
")",
"cos_lat2",
"=",
"numpy",
".",
"cos",
"(",
"lats2",
")",
"true_course",
"=",
"numpy",
".",
"degrees",
"(",
"numpy",
".",
"arctan2",
"(",
"numpy",
".",
"sin",
"(",
"lons1",
"-",
"lons2",
")",
"*",
"cos_lat2",
",",
"numpy",
".",
"cos",
"(",
"lats1",
")",
"*",
"numpy",
".",
"sin",
"(",
"lats2",
")",
"-",
"numpy",
".",
"sin",
"(",
"lats1",
")",
"*",
"cos_lat2",
"*",
"numpy",
".",
"cos",
"(",
"lons1",
"-",
"lons2",
")",
")",
")",
"return",
"(",
"360",
"-",
"true_course",
")",
"%",
"360"
] | Calculate the azimuth between two points or two collections of points.
Parameters are the same as for :func:`geodetic_distance`.
Implements an "alternative formula" from
http://williams.best.vwh.net/avform.htm#Crs
:returns:
Azimuth as an angle between direction to north from first point and
direction to the second point measured clockwise in decimal degrees. | [
"Calculate",
"the",
"azimuth",
"between",
"two",
"points",
"or",
"two",
"collections",
"of",
"points",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/geodetic.py#L57-L77 | train | 233,085 |
gem/oq-engine | openquake/hazardlib/geo/geodetic.py | min_distance_to_segment | def min_distance_to_segment(seglons, seglats, lons, lats):
"""
This function computes the shortest distance to a segment in a 2D reference
system.
:parameter seglons:
A list or an array of floats specifying the longitude values of the two
vertexes delimiting the segment.
:parameter seglats:
A list or an array of floats specifying the latitude values of the two
vertexes delimiting the segment.
:parameter lons:
A list or a 1D array of floats specifying the longitude values of the
points for which the calculation of the shortest distance is requested.
:parameter lats:
A list or a 1D array of floats specifying the latitude values of the
points for which the calculation of the shortest distance is requested.
:returns:
An array of the same shape as lons which contains for each point
defined by (lons, lats) the shortest distance to the segment.
Distances are negative for those points that stay on the 'left side'
of the segment direction and whose projection lies within the segment
edges. For all other points, distance is positive.
"""
# Check the size of the seglons, seglats arrays
assert len(seglons) == len(seglats) == 2
# Compute the azimuth of the segment
seg_azim = azimuth(seglons[0], seglats[0], seglons[1], seglats[1])
# Compute the azimuth of the direction obtained
# connecting the first point defining the segment and each site
azimuth1 = azimuth(seglons[0], seglats[0], lons, lats)
# Compute the azimuth of the direction obtained
# connecting the second point defining the segment and each site
azimuth2 = azimuth(seglons[1], seglats[1], lons, lats)
# Find the points inside the band defined by the two lines perpendicular
# to the segment direction passing through the two vertexes of the segment.
# For these points the closest distance is the distance from the great arc.
idx_in = numpy.nonzero(
(numpy.cos(numpy.radians(seg_azim-azimuth1)) >= 0.0) &
(numpy.cos(numpy.radians(seg_azim-azimuth2)) <= 0.0))
# Find the points outside the band defined by the two line perpendicular
# to the segment direction passing through the two vertexes of the segment.
# For these points the closest distance is the minimum of the distance from
# the two point vertexes.
idx_out = numpy.nonzero(
(numpy.cos(numpy.radians(seg_azim-azimuth1)) < 0.0) |
(numpy.cos(numpy.radians(seg_azim-azimuth2)) > 0.0))
# Find the indexes of points 'on the left of the segment'
idx_neg = numpy.nonzero(numpy.sin(numpy.radians(
(azimuth1-seg_azim))) < 0.0)
# Now let's compute the distances for the two cases.
dists = numpy.zeros_like(lons)
if len(idx_in[0]):
dists[idx_in] = distance_to_arc(
seglons[0], seglats[0], seg_azim, lons[idx_in], lats[idx_in])
if len(idx_out[0]):
dists[idx_out] = min_geodetic_distance(
(seglons, seglats), (lons[idx_out], lats[idx_out]))
# Finally we correct the sign of the distances in order to make sure that
# the points on the right semispace defined using as a reference the
# direction defined by the segment (i.e. the direction defined by going
# from the first point to the second one) have a positive distance and
# the others a negative one.
dists = abs(dists)
dists[idx_neg] = - dists[idx_neg]
return dists | python | def min_distance_to_segment(seglons, seglats, lons, lats):
"""
This function computes the shortest distance to a segment in a 2D reference
system.
:parameter seglons:
A list or an array of floats specifying the longitude values of the two
vertexes delimiting the segment.
:parameter seglats:
A list or an array of floats specifying the latitude values of the two
vertexes delimiting the segment.
:parameter lons:
A list or a 1D array of floats specifying the longitude values of the
points for which the calculation of the shortest distance is requested.
:parameter lats:
A list or a 1D array of floats specifying the latitude values of the
points for which the calculation of the shortest distance is requested.
:returns:
An array of the same shape as lons which contains for each point
defined by (lons, lats) the shortest distance to the segment.
Distances are negative for those points that stay on the 'left side'
of the segment direction and whose projection lies within the segment
edges. For all other points, distance is positive.
"""
# Check the size of the seglons, seglats arrays
assert len(seglons) == len(seglats) == 2
# Compute the azimuth of the segment
seg_azim = azimuth(seglons[0], seglats[0], seglons[1], seglats[1])
# Compute the azimuth of the direction obtained
# connecting the first point defining the segment and each site
azimuth1 = azimuth(seglons[0], seglats[0], lons, lats)
# Compute the azimuth of the direction obtained
# connecting the second point defining the segment and each site
azimuth2 = azimuth(seglons[1], seglats[1], lons, lats)
# Find the points inside the band defined by the two lines perpendicular
# to the segment direction passing through the two vertexes of the segment.
# For these points the closest distance is the distance from the great arc.
idx_in = numpy.nonzero(
(numpy.cos(numpy.radians(seg_azim-azimuth1)) >= 0.0) &
(numpy.cos(numpy.radians(seg_azim-azimuth2)) <= 0.0))
# Find the points outside the band defined by the two line perpendicular
# to the segment direction passing through the two vertexes of the segment.
# For these points the closest distance is the minimum of the distance from
# the two point vertexes.
idx_out = numpy.nonzero(
(numpy.cos(numpy.radians(seg_azim-azimuth1)) < 0.0) |
(numpy.cos(numpy.radians(seg_azim-azimuth2)) > 0.0))
# Find the indexes of points 'on the left of the segment'
idx_neg = numpy.nonzero(numpy.sin(numpy.radians(
(azimuth1-seg_azim))) < 0.0)
# Now let's compute the distances for the two cases.
dists = numpy.zeros_like(lons)
if len(idx_in[0]):
dists[idx_in] = distance_to_arc(
seglons[0], seglats[0], seg_azim, lons[idx_in], lats[idx_in])
if len(idx_out[0]):
dists[idx_out] = min_geodetic_distance(
(seglons, seglats), (lons[idx_out], lats[idx_out]))
# Finally we correct the sign of the distances in order to make sure that
# the points on the right semispace defined using as a reference the
# direction defined by the segment (i.e. the direction defined by going
# from the first point to the second one) have a positive distance and
# the others a negative one.
dists = abs(dists)
dists[idx_neg] = - dists[idx_neg]
return dists | [
"def",
"min_distance_to_segment",
"(",
"seglons",
",",
"seglats",
",",
"lons",
",",
"lats",
")",
":",
"# Check the size of the seglons, seglats arrays",
"assert",
"len",
"(",
"seglons",
")",
"==",
"len",
"(",
"seglats",
")",
"==",
"2",
"# Compute the azimuth of the segment",
"seg_azim",
"=",
"azimuth",
"(",
"seglons",
"[",
"0",
"]",
",",
"seglats",
"[",
"0",
"]",
",",
"seglons",
"[",
"1",
"]",
",",
"seglats",
"[",
"1",
"]",
")",
"# Compute the azimuth of the direction obtained",
"# connecting the first point defining the segment and each site",
"azimuth1",
"=",
"azimuth",
"(",
"seglons",
"[",
"0",
"]",
",",
"seglats",
"[",
"0",
"]",
",",
"lons",
",",
"lats",
")",
"# Compute the azimuth of the direction obtained",
"# connecting the second point defining the segment and each site",
"azimuth2",
"=",
"azimuth",
"(",
"seglons",
"[",
"1",
"]",
",",
"seglats",
"[",
"1",
"]",
",",
"lons",
",",
"lats",
")",
"# Find the points inside the band defined by the two lines perpendicular",
"# to the segment direction passing through the two vertexes of the segment.",
"# For these points the closest distance is the distance from the great arc.",
"idx_in",
"=",
"numpy",
".",
"nonzero",
"(",
"(",
"numpy",
".",
"cos",
"(",
"numpy",
".",
"radians",
"(",
"seg_azim",
"-",
"azimuth1",
")",
")",
">=",
"0.0",
")",
"&",
"(",
"numpy",
".",
"cos",
"(",
"numpy",
".",
"radians",
"(",
"seg_azim",
"-",
"azimuth2",
")",
")",
"<=",
"0.0",
")",
")",
"# Find the points outside the band defined by the two line perpendicular",
"# to the segment direction passing through the two vertexes of the segment.",
"# For these points the closest distance is the minimum of the distance from",
"# the two point vertexes.",
"idx_out",
"=",
"numpy",
".",
"nonzero",
"(",
"(",
"numpy",
".",
"cos",
"(",
"numpy",
".",
"radians",
"(",
"seg_azim",
"-",
"azimuth1",
")",
")",
"<",
"0.0",
")",
"|",
"(",
"numpy",
".",
"cos",
"(",
"numpy",
".",
"radians",
"(",
"seg_azim",
"-",
"azimuth2",
")",
")",
">",
"0.0",
")",
")",
"# Find the indexes of points 'on the left of the segment'",
"idx_neg",
"=",
"numpy",
".",
"nonzero",
"(",
"numpy",
".",
"sin",
"(",
"numpy",
".",
"radians",
"(",
"(",
"azimuth1",
"-",
"seg_azim",
")",
")",
")",
"<",
"0.0",
")",
"# Now let's compute the distances for the two cases.",
"dists",
"=",
"numpy",
".",
"zeros_like",
"(",
"lons",
")",
"if",
"len",
"(",
"idx_in",
"[",
"0",
"]",
")",
":",
"dists",
"[",
"idx_in",
"]",
"=",
"distance_to_arc",
"(",
"seglons",
"[",
"0",
"]",
",",
"seglats",
"[",
"0",
"]",
",",
"seg_azim",
",",
"lons",
"[",
"idx_in",
"]",
",",
"lats",
"[",
"idx_in",
"]",
")",
"if",
"len",
"(",
"idx_out",
"[",
"0",
"]",
")",
":",
"dists",
"[",
"idx_out",
"]",
"=",
"min_geodetic_distance",
"(",
"(",
"seglons",
",",
"seglats",
")",
",",
"(",
"lons",
"[",
"idx_out",
"]",
",",
"lats",
"[",
"idx_out",
"]",
")",
")",
"# Finally we correct the sign of the distances in order to make sure that",
"# the points on the right semispace defined using as a reference the",
"# direction defined by the segment (i.e. the direction defined by going",
"# from the first point to the second one) have a positive distance and",
"# the others a negative one.",
"dists",
"=",
"abs",
"(",
"dists",
")",
"dists",
"[",
"idx_neg",
"]",
"=",
"-",
"dists",
"[",
"idx_neg",
"]",
"return",
"dists"
] | This function computes the shortest distance to a segment in a 2D reference
system.
:parameter seglons:
A list or an array of floats specifying the longitude values of the two
vertexes delimiting the segment.
:parameter seglats:
A list or an array of floats specifying the latitude values of the two
vertexes delimiting the segment.
:parameter lons:
A list or a 1D array of floats specifying the longitude values of the
points for which the calculation of the shortest distance is requested.
:parameter lats:
A list or a 1D array of floats specifying the latitude values of the
points for which the calculation of the shortest distance is requested.
:returns:
An array of the same shape as lons which contains for each point
defined by (lons, lats) the shortest distance to the segment.
Distances are negative for those points that stay on the 'left side'
of the segment direction and whose projection lies within the segment
edges. For all other points, distance is positive. | [
"This",
"function",
"computes",
"the",
"shortest",
"distance",
"to",
"a",
"segment",
"in",
"a",
"2D",
"reference",
"system",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/geodetic.py#L99-L174 | train | 233,086 |
gem/oq-engine | openquake/hazardlib/geo/geodetic.py | min_geodetic_distance | def min_geodetic_distance(a, b):
"""
Compute the minimum distance between first mesh and each point
of the second mesh when both are defined on the earth surface.
:param a: a pair of (lons, lats) or an array of cartesian coordinates
:param b: a pair of (lons, lats) or an array of cartesian coordinates
"""
if isinstance(a, tuple):
a = spherical_to_cartesian(a[0].flatten(), a[1].flatten())
if isinstance(b, tuple):
b = spherical_to_cartesian(b[0].flatten(), b[1].flatten())
return cdist(a, b).min(axis=0) | python | def min_geodetic_distance(a, b):
"""
Compute the minimum distance between first mesh and each point
of the second mesh when both are defined on the earth surface.
:param a: a pair of (lons, lats) or an array of cartesian coordinates
:param b: a pair of (lons, lats) or an array of cartesian coordinates
"""
if isinstance(a, tuple):
a = spherical_to_cartesian(a[0].flatten(), a[1].flatten())
if isinstance(b, tuple):
b = spherical_to_cartesian(b[0].flatten(), b[1].flatten())
return cdist(a, b).min(axis=0) | [
"def",
"min_geodetic_distance",
"(",
"a",
",",
"b",
")",
":",
"if",
"isinstance",
"(",
"a",
",",
"tuple",
")",
":",
"a",
"=",
"spherical_to_cartesian",
"(",
"a",
"[",
"0",
"]",
".",
"flatten",
"(",
")",
",",
"a",
"[",
"1",
"]",
".",
"flatten",
"(",
")",
")",
"if",
"isinstance",
"(",
"b",
",",
"tuple",
")",
":",
"b",
"=",
"spherical_to_cartesian",
"(",
"b",
"[",
"0",
"]",
".",
"flatten",
"(",
")",
",",
"b",
"[",
"1",
"]",
".",
"flatten",
"(",
")",
")",
"return",
"cdist",
"(",
"a",
",",
"b",
")",
".",
"min",
"(",
"axis",
"=",
"0",
")"
] | Compute the minimum distance between first mesh and each point
of the second mesh when both are defined on the earth surface.
:param a: a pair of (lons, lats) or an array of cartesian coordinates
:param b: a pair of (lons, lats) or an array of cartesian coordinates | [
"Compute",
"the",
"minimum",
"distance",
"between",
"first",
"mesh",
"and",
"each",
"point",
"of",
"the",
"second",
"mesh",
"when",
"both",
"are",
"defined",
"on",
"the",
"earth",
"surface",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/geodetic.py#L224-L236 | train | 233,087 |
gem/oq-engine | openquake/hazardlib/geo/geodetic.py | intervals_between | def intervals_between(lon1, lat1, depth1, lon2, lat2, depth2, length):
"""
Find a list of points between two given ones that lie on the same
great circle arc and are equally spaced by ``length`` km.
:param float lon1, lat1, depth1:
Coordinates of a point to start placing intervals from. The first
point in the resulting list has these coordinates.
:param float lon2, lat2, depth2:
Coordinates of the other end of the great circle arc segment
to put intervals on. The last resulting point might be closer
to the first reference point than the second one or further,
since the number of segments is taken as rounded division of
length between two reference points and ``length``.
:param length:
Required distance between two subsequent resulting points, in km.
:returns:
Tuple of three 1d numpy arrays: longitudes, latitudes and depths
of resulting points respectively.
Rounds the distance between two reference points with respect
to ``length`` and calls :func:`npoints_towards`.
"""
assert length > 0
hdist = geodetic_distance(lon1, lat1, lon2, lat2)
vdist = depth2 - depth1
# if this method is called multiple times with coordinates that are
# separated by the same distance, because of floating point imprecisions
# the total distance may have slightly different values (for instance if
# the distance between two set of points is 65 km, total distance can be
# 64.9999999999989910 and 65.0000000000020322). These two values bring to
# two different values of num_intervals (32 in the first case and 33 in
# the second), and this is a problem because for the same distance we
# should have the same number of intervals. To reduce potential differences
# due to floating point errors, we therefore round total_distance to a
# fixed precision (7)
total_distance = round(numpy.sqrt(hdist ** 2 + vdist ** 2), 7)
num_intervals = int(round(total_distance / length))
if num_intervals == 0:
return numpy.array([lon1]), numpy.array([lat1]), numpy.array([depth1])
dist_factor = (length * num_intervals) / total_distance
return npoints_towards(
lon1, lat1, depth1, azimuth(lon1, lat1, lon2, lat2),
hdist * dist_factor, vdist * dist_factor, num_intervals + 1) | python | def intervals_between(lon1, lat1, depth1, lon2, lat2, depth2, length):
"""
Find a list of points between two given ones that lie on the same
great circle arc and are equally spaced by ``length`` km.
:param float lon1, lat1, depth1:
Coordinates of a point to start placing intervals from. The first
point in the resulting list has these coordinates.
:param float lon2, lat2, depth2:
Coordinates of the other end of the great circle arc segment
to put intervals on. The last resulting point might be closer
to the first reference point than the second one or further,
since the number of segments is taken as rounded division of
length between two reference points and ``length``.
:param length:
Required distance between two subsequent resulting points, in km.
:returns:
Tuple of three 1d numpy arrays: longitudes, latitudes and depths
of resulting points respectively.
Rounds the distance between two reference points with respect
to ``length`` and calls :func:`npoints_towards`.
"""
assert length > 0
hdist = geodetic_distance(lon1, lat1, lon2, lat2)
vdist = depth2 - depth1
# if this method is called multiple times with coordinates that are
# separated by the same distance, because of floating point imprecisions
# the total distance may have slightly different values (for instance if
# the distance between two set of points is 65 km, total distance can be
# 64.9999999999989910 and 65.0000000000020322). These two values bring to
# two different values of num_intervals (32 in the first case and 33 in
# the second), and this is a problem because for the same distance we
# should have the same number of intervals. To reduce potential differences
# due to floating point errors, we therefore round total_distance to a
# fixed precision (7)
total_distance = round(numpy.sqrt(hdist ** 2 + vdist ** 2), 7)
num_intervals = int(round(total_distance / length))
if num_intervals == 0:
return numpy.array([lon1]), numpy.array([lat1]), numpy.array([depth1])
dist_factor = (length * num_intervals) / total_distance
return npoints_towards(
lon1, lat1, depth1, azimuth(lon1, lat1, lon2, lat2),
hdist * dist_factor, vdist * dist_factor, num_intervals + 1) | [
"def",
"intervals_between",
"(",
"lon1",
",",
"lat1",
",",
"depth1",
",",
"lon2",
",",
"lat2",
",",
"depth2",
",",
"length",
")",
":",
"assert",
"length",
">",
"0",
"hdist",
"=",
"geodetic_distance",
"(",
"lon1",
",",
"lat1",
",",
"lon2",
",",
"lat2",
")",
"vdist",
"=",
"depth2",
"-",
"depth1",
"# if this method is called multiple times with coordinates that are",
"# separated by the same distance, because of floating point imprecisions",
"# the total distance may have slightly different values (for instance if",
"# the distance between two set of points is 65 km, total distance can be",
"# 64.9999999999989910 and 65.0000000000020322). These two values bring to",
"# two different values of num_intervals (32 in the first case and 33 in",
"# the second), and this is a problem because for the same distance we",
"# should have the same number of intervals. To reduce potential differences",
"# due to floating point errors, we therefore round total_distance to a",
"# fixed precision (7)",
"total_distance",
"=",
"round",
"(",
"numpy",
".",
"sqrt",
"(",
"hdist",
"**",
"2",
"+",
"vdist",
"**",
"2",
")",
",",
"7",
")",
"num_intervals",
"=",
"int",
"(",
"round",
"(",
"total_distance",
"/",
"length",
")",
")",
"if",
"num_intervals",
"==",
"0",
":",
"return",
"numpy",
".",
"array",
"(",
"[",
"lon1",
"]",
")",
",",
"numpy",
".",
"array",
"(",
"[",
"lat1",
"]",
")",
",",
"numpy",
".",
"array",
"(",
"[",
"depth1",
"]",
")",
"dist_factor",
"=",
"(",
"length",
"*",
"num_intervals",
")",
"/",
"total_distance",
"return",
"npoints_towards",
"(",
"lon1",
",",
"lat1",
",",
"depth1",
",",
"azimuth",
"(",
"lon1",
",",
"lat1",
",",
"lon2",
",",
"lat2",
")",
",",
"hdist",
"*",
"dist_factor",
",",
"vdist",
"*",
"dist_factor",
",",
"num_intervals",
"+",
"1",
")"
] | Find a list of points between two given ones that lie on the same
great circle arc and are equally spaced by ``length`` km.
:param float lon1, lat1, depth1:
Coordinates of a point to start placing intervals from. The first
point in the resulting list has these coordinates.
:param float lon2, lat2, depth2:
Coordinates of the other end of the great circle arc segment
to put intervals on. The last resulting point might be closer
to the first reference point than the second one or further,
since the number of segments is taken as rounded division of
length between two reference points and ``length``.
:param length:
Required distance between two subsequent resulting points, in km.
:returns:
Tuple of three 1d numpy arrays: longitudes, latitudes and depths
of resulting points respectively.
Rounds the distance between two reference points with respect
to ``length`` and calls :func:`npoints_towards`. | [
"Find",
"a",
"list",
"of",
"points",
"between",
"two",
"given",
"ones",
"that",
"lie",
"on",
"the",
"same",
"great",
"circle",
"arc",
"and",
"are",
"equally",
"spaced",
"by",
"length",
"km",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/geodetic.py#L259-L302 | train | 233,088 |
gem/oq-engine | openquake/hazardlib/geo/geodetic.py | npoints_between | def npoints_between(lon1, lat1, depth1, lon2, lat2, depth2, npoints):
"""
Find a list of specified number of points between two given ones that are
equally spaced along the great circle arc connecting given points.
:param float lon1, lat1, depth1:
Coordinates of a point to start from. The first point in a resulting
list has these coordinates.
:param float lon2, lat2, depth2:
Coordinates of a point to finish at. The last point in a resulting
list has these coordinates.
:param npoints:
Integer number of points to return. First and last points count,
so if there have to be two intervals, ``npoints`` should be 3.
:returns:
Tuple of three 1d numpy arrays: longitudes, latitudes and depths
of resulting points respectively.
Finds distance between two reference points and calls
:func:`npoints_towards`.
"""
hdist = geodetic_distance(lon1, lat1, lon2, lat2)
vdist = depth2 - depth1
rlons, rlats, rdepths = npoints_towards(
lon1, lat1, depth1, azimuth(lon1, lat1, lon2, lat2),
hdist, vdist, npoints
)
# the last point should be left intact
rlons[-1] = lon2
rlats[-1] = lat2
rdepths[-1] = depth2
return rlons, rlats, rdepths | python | def npoints_between(lon1, lat1, depth1, lon2, lat2, depth2, npoints):
"""
Find a list of specified number of points between two given ones that are
equally spaced along the great circle arc connecting given points.
:param float lon1, lat1, depth1:
Coordinates of a point to start from. The first point in a resulting
list has these coordinates.
:param float lon2, lat2, depth2:
Coordinates of a point to finish at. The last point in a resulting
list has these coordinates.
:param npoints:
Integer number of points to return. First and last points count,
so if there have to be two intervals, ``npoints`` should be 3.
:returns:
Tuple of three 1d numpy arrays: longitudes, latitudes and depths
of resulting points respectively.
Finds distance between two reference points and calls
:func:`npoints_towards`.
"""
hdist = geodetic_distance(lon1, lat1, lon2, lat2)
vdist = depth2 - depth1
rlons, rlats, rdepths = npoints_towards(
lon1, lat1, depth1, azimuth(lon1, lat1, lon2, lat2),
hdist, vdist, npoints
)
# the last point should be left intact
rlons[-1] = lon2
rlats[-1] = lat2
rdepths[-1] = depth2
return rlons, rlats, rdepths | [
"def",
"npoints_between",
"(",
"lon1",
",",
"lat1",
",",
"depth1",
",",
"lon2",
",",
"lat2",
",",
"depth2",
",",
"npoints",
")",
":",
"hdist",
"=",
"geodetic_distance",
"(",
"lon1",
",",
"lat1",
",",
"lon2",
",",
"lat2",
")",
"vdist",
"=",
"depth2",
"-",
"depth1",
"rlons",
",",
"rlats",
",",
"rdepths",
"=",
"npoints_towards",
"(",
"lon1",
",",
"lat1",
",",
"depth1",
",",
"azimuth",
"(",
"lon1",
",",
"lat1",
",",
"lon2",
",",
"lat2",
")",
",",
"hdist",
",",
"vdist",
",",
"npoints",
")",
"# the last point should be left intact",
"rlons",
"[",
"-",
"1",
"]",
"=",
"lon2",
"rlats",
"[",
"-",
"1",
"]",
"=",
"lat2",
"rdepths",
"[",
"-",
"1",
"]",
"=",
"depth2",
"return",
"rlons",
",",
"rlats",
",",
"rdepths"
] | Find a list of specified number of points between two given ones that are
equally spaced along the great circle arc connecting given points.
:param float lon1, lat1, depth1:
Coordinates of a point to start from. The first point in a resulting
list has these coordinates.
:param float lon2, lat2, depth2:
Coordinates of a point to finish at. The last point in a resulting
list has these coordinates.
:param npoints:
Integer number of points to return. First and last points count,
so if there have to be two intervals, ``npoints`` should be 3.
:returns:
Tuple of three 1d numpy arrays: longitudes, latitudes and depths
of resulting points respectively.
Finds distance between two reference points and calls
:func:`npoints_towards`. | [
"Find",
"a",
"list",
"of",
"specified",
"number",
"of",
"points",
"between",
"two",
"given",
"ones",
"that",
"are",
"equally",
"spaced",
"along",
"the",
"great",
"circle",
"arc",
"connecting",
"given",
"points",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/geodetic.py#L305-L336 | train | 233,089 |
gem/oq-engine | openquake/hazardlib/geo/geodetic.py | npoints_towards | def npoints_towards(lon, lat, depth, azimuth, hdist, vdist, npoints):
"""
Find a list of specified number of points starting from a given one
along a great circle arc with a given azimuth measured in a given point.
:param float lon, lat, depth:
Coordinates of a point to start from. The first point in a resulting
list has these coordinates.
:param azimuth:
A direction representing a great circle arc together with a reference
point.
:param hdist:
Horizontal (geodetic) distance from reference point to the last point
of the resulting list, in km.
:param vdist:
Vertical (depth) distance between reference and the last point, in km.
:param npoints:
Integer number of points to return. First and last points count,
so if there have to be two intervals, ``npoints`` should be 3.
:returns:
Tuple of three 1d numpy arrays: longitudes, latitudes and depths
of resulting points respectively.
Implements "completely general but more complicated algorithm" from
http://williams.best.vwh.net/avform.htm#LL
"""
assert npoints > 1
rlon, rlat = numpy.radians(lon), numpy.radians(lat)
tc = numpy.radians(360 - azimuth)
hdists = numpy.arange(npoints, dtype=float)
hdists *= (hdist / EARTH_RADIUS) / (npoints - 1)
vdists = numpy.arange(npoints, dtype=float)
vdists *= vdist / (npoints - 1)
sin_dists = numpy.sin(hdists)
cos_dists = numpy.cos(hdists)
sin_lat = numpy.sin(rlat)
cos_lat = numpy.cos(rlat)
sin_lats = sin_lat * cos_dists + cos_lat * sin_dists * numpy.cos(tc)
lats = numpy.degrees(numpy.arcsin(sin_lats))
dlon = numpy.arctan2(numpy.sin(tc) * sin_dists * cos_lat,
cos_dists - sin_lat * sin_lats)
lons = numpy.mod(rlon - dlon + numpy.pi, 2 * numpy.pi) - numpy.pi
lons = numpy.degrees(lons)
depths = vdists + depth
# the first point should be left intact
lons[0] = lon
lats[0] = lat
depths[0] = depth
return lons, lats, depths | python | def npoints_towards(lon, lat, depth, azimuth, hdist, vdist, npoints):
"""
Find a list of specified number of points starting from a given one
along a great circle arc with a given azimuth measured in a given point.
:param float lon, lat, depth:
Coordinates of a point to start from. The first point in a resulting
list has these coordinates.
:param azimuth:
A direction representing a great circle arc together with a reference
point.
:param hdist:
Horizontal (geodetic) distance from reference point to the last point
of the resulting list, in km.
:param vdist:
Vertical (depth) distance between reference and the last point, in km.
:param npoints:
Integer number of points to return. First and last points count,
so if there have to be two intervals, ``npoints`` should be 3.
:returns:
Tuple of three 1d numpy arrays: longitudes, latitudes and depths
of resulting points respectively.
Implements "completely general but more complicated algorithm" from
http://williams.best.vwh.net/avform.htm#LL
"""
assert npoints > 1
rlon, rlat = numpy.radians(lon), numpy.radians(lat)
tc = numpy.radians(360 - azimuth)
hdists = numpy.arange(npoints, dtype=float)
hdists *= (hdist / EARTH_RADIUS) / (npoints - 1)
vdists = numpy.arange(npoints, dtype=float)
vdists *= vdist / (npoints - 1)
sin_dists = numpy.sin(hdists)
cos_dists = numpy.cos(hdists)
sin_lat = numpy.sin(rlat)
cos_lat = numpy.cos(rlat)
sin_lats = sin_lat * cos_dists + cos_lat * sin_dists * numpy.cos(tc)
lats = numpy.degrees(numpy.arcsin(sin_lats))
dlon = numpy.arctan2(numpy.sin(tc) * sin_dists * cos_lat,
cos_dists - sin_lat * sin_lats)
lons = numpy.mod(rlon - dlon + numpy.pi, 2 * numpy.pi) - numpy.pi
lons = numpy.degrees(lons)
depths = vdists + depth
# the first point should be left intact
lons[0] = lon
lats[0] = lat
depths[0] = depth
return lons, lats, depths | [
"def",
"npoints_towards",
"(",
"lon",
",",
"lat",
",",
"depth",
",",
"azimuth",
",",
"hdist",
",",
"vdist",
",",
"npoints",
")",
":",
"assert",
"npoints",
">",
"1",
"rlon",
",",
"rlat",
"=",
"numpy",
".",
"radians",
"(",
"lon",
")",
",",
"numpy",
".",
"radians",
"(",
"lat",
")",
"tc",
"=",
"numpy",
".",
"radians",
"(",
"360",
"-",
"azimuth",
")",
"hdists",
"=",
"numpy",
".",
"arange",
"(",
"npoints",
",",
"dtype",
"=",
"float",
")",
"hdists",
"*=",
"(",
"hdist",
"/",
"EARTH_RADIUS",
")",
"/",
"(",
"npoints",
"-",
"1",
")",
"vdists",
"=",
"numpy",
".",
"arange",
"(",
"npoints",
",",
"dtype",
"=",
"float",
")",
"vdists",
"*=",
"vdist",
"/",
"(",
"npoints",
"-",
"1",
")",
"sin_dists",
"=",
"numpy",
".",
"sin",
"(",
"hdists",
")",
"cos_dists",
"=",
"numpy",
".",
"cos",
"(",
"hdists",
")",
"sin_lat",
"=",
"numpy",
".",
"sin",
"(",
"rlat",
")",
"cos_lat",
"=",
"numpy",
".",
"cos",
"(",
"rlat",
")",
"sin_lats",
"=",
"sin_lat",
"*",
"cos_dists",
"+",
"cos_lat",
"*",
"sin_dists",
"*",
"numpy",
".",
"cos",
"(",
"tc",
")",
"lats",
"=",
"numpy",
".",
"degrees",
"(",
"numpy",
".",
"arcsin",
"(",
"sin_lats",
")",
")",
"dlon",
"=",
"numpy",
".",
"arctan2",
"(",
"numpy",
".",
"sin",
"(",
"tc",
")",
"*",
"sin_dists",
"*",
"cos_lat",
",",
"cos_dists",
"-",
"sin_lat",
"*",
"sin_lats",
")",
"lons",
"=",
"numpy",
".",
"mod",
"(",
"rlon",
"-",
"dlon",
"+",
"numpy",
".",
"pi",
",",
"2",
"*",
"numpy",
".",
"pi",
")",
"-",
"numpy",
".",
"pi",
"lons",
"=",
"numpy",
".",
"degrees",
"(",
"lons",
")",
"depths",
"=",
"vdists",
"+",
"depth",
"# the first point should be left intact",
"lons",
"[",
"0",
"]",
"=",
"lon",
"lats",
"[",
"0",
"]",
"=",
"lat",
"depths",
"[",
"0",
"]",
"=",
"depth",
"return",
"lons",
",",
"lats",
",",
"depths"
] | Find a list of specified number of points starting from a given one
along a great circle arc with a given azimuth measured in a given point.
:param float lon, lat, depth:
Coordinates of a point to start from. The first point in a resulting
list has these coordinates.
:param azimuth:
A direction representing a great circle arc together with a reference
point.
:param hdist:
Horizontal (geodetic) distance from reference point to the last point
of the resulting list, in km.
:param vdist:
Vertical (depth) distance between reference and the last point, in km.
:param npoints:
Integer number of points to return. First and last points count,
so if there have to be two intervals, ``npoints`` should be 3.
:returns:
Tuple of three 1d numpy arrays: longitudes, latitudes and depths
of resulting points respectively.
Implements "completely general but more complicated algorithm" from
http://williams.best.vwh.net/avform.htm#LL | [
"Find",
"a",
"list",
"of",
"specified",
"number",
"of",
"points",
"starting",
"from",
"a",
"given",
"one",
"along",
"a",
"great",
"circle",
"arc",
"with",
"a",
"given",
"azimuth",
"measured",
"in",
"a",
"given",
"point",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/geodetic.py#L339-L393 | train | 233,090 |
gem/oq-engine | openquake/hazardlib/geo/geodetic.py | _prepare_coords | def _prepare_coords(lons1, lats1, lons2, lats2):
"""
Convert two pairs of spherical coordinates in decimal degrees
to numpy arrays of radians. Makes sure that respective coordinates
in pairs have the same shape.
"""
lons1 = numpy.radians(lons1)
lats1 = numpy.radians(lats1)
assert lons1.shape == lats1.shape
lons2 = numpy.radians(lons2)
lats2 = numpy.radians(lats2)
assert lons2.shape == lats2.shape
return lons1, lats1, lons2, lats2 | python | def _prepare_coords(lons1, lats1, lons2, lats2):
"""
Convert two pairs of spherical coordinates in decimal degrees
to numpy arrays of radians. Makes sure that respective coordinates
in pairs have the same shape.
"""
lons1 = numpy.radians(lons1)
lats1 = numpy.radians(lats1)
assert lons1.shape == lats1.shape
lons2 = numpy.radians(lons2)
lats2 = numpy.radians(lats2)
assert lons2.shape == lats2.shape
return lons1, lats1, lons2, lats2 | [
"def",
"_prepare_coords",
"(",
"lons1",
",",
"lats1",
",",
"lons2",
",",
"lats2",
")",
":",
"lons1",
"=",
"numpy",
".",
"radians",
"(",
"lons1",
")",
"lats1",
"=",
"numpy",
".",
"radians",
"(",
"lats1",
")",
"assert",
"lons1",
".",
"shape",
"==",
"lats1",
".",
"shape",
"lons2",
"=",
"numpy",
".",
"radians",
"(",
"lons2",
")",
"lats2",
"=",
"numpy",
".",
"radians",
"(",
"lats2",
")",
"assert",
"lons2",
".",
"shape",
"==",
"lats2",
".",
"shape",
"return",
"lons1",
",",
"lats1",
",",
"lons2",
",",
"lats2"
] | Convert two pairs of spherical coordinates in decimal degrees
to numpy arrays of radians. Makes sure that respective coordinates
in pairs have the same shape. | [
"Convert",
"two",
"pairs",
"of",
"spherical",
"coordinates",
"in",
"decimal",
"degrees",
"to",
"numpy",
"arrays",
"of",
"radians",
".",
"Makes",
"sure",
"that",
"respective",
"coordinates",
"in",
"pairs",
"have",
"the",
"same",
"shape",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/geodetic.py#L528-L540 | train | 233,091 |
gem/oq-engine | openquake/hmtk/sources/simple_fault_source.py | mtkSimpleFaultSource.select_catalogue | def select_catalogue(self, selector, distance,
distance_metric='joyner-boore', upper_eq_depth=None,
lower_eq_depth=None):
'''
Selects earthquakes within a distance of the fault
:param selector:
Populated instance of :class:
`openquake.hmtk.seismicity.selector.CatalogueSelector`
:param distance:
Distance from point (km) for selection
:param str distance_metric
Choice of fault source distance metric 'joyner-boore' or 'rupture'
:param float upper_eq_depth:
Upper hypocentral depth of hypocentres to be selected
:param float lower_eq_depth:
Lower hypocentral depth of hypocentres to be selected
'''
if selector.catalogue.get_number_events() < 1:
raise ValueError('No events found in catalogue!')
# rupture metric is selected and dip != 90 or 'rupture'
if ('rupture' in distance_metric) and (fabs(self.dip - 90) > 1E-5):
# Use rupture distance
self.catalogue = selector.within_rupture_distance(
self.geometry,
distance,
upper_depth=upper_eq_depth,
lower_depth=lower_eq_depth)
else:
# Use Joyner-Boore distance
self.catalogue = selector.within_joyner_boore_distance(
self.geometry,
distance,
upper_depth=upper_eq_depth,
lower_depth=lower_eq_depth)
if self.catalogue.get_number_events() < 5:
# Throw a warning regarding the small number of earthquakes in
# the source!
warnings.warn('Source %s (%s) has fewer than 5 events'
% (self.id, self.name)) | python | def select_catalogue(self, selector, distance,
distance_metric='joyner-boore', upper_eq_depth=None,
lower_eq_depth=None):
'''
Selects earthquakes within a distance of the fault
:param selector:
Populated instance of :class:
`openquake.hmtk.seismicity.selector.CatalogueSelector`
:param distance:
Distance from point (km) for selection
:param str distance_metric
Choice of fault source distance metric 'joyner-boore' or 'rupture'
:param float upper_eq_depth:
Upper hypocentral depth of hypocentres to be selected
:param float lower_eq_depth:
Lower hypocentral depth of hypocentres to be selected
'''
if selector.catalogue.get_number_events() < 1:
raise ValueError('No events found in catalogue!')
# rupture metric is selected and dip != 90 or 'rupture'
if ('rupture' in distance_metric) and (fabs(self.dip - 90) > 1E-5):
# Use rupture distance
self.catalogue = selector.within_rupture_distance(
self.geometry,
distance,
upper_depth=upper_eq_depth,
lower_depth=lower_eq_depth)
else:
# Use Joyner-Boore distance
self.catalogue = selector.within_joyner_boore_distance(
self.geometry,
distance,
upper_depth=upper_eq_depth,
lower_depth=lower_eq_depth)
if self.catalogue.get_number_events() < 5:
# Throw a warning regarding the small number of earthquakes in
# the source!
warnings.warn('Source %s (%s) has fewer than 5 events'
% (self.id, self.name)) | [
"def",
"select_catalogue",
"(",
"self",
",",
"selector",
",",
"distance",
",",
"distance_metric",
"=",
"'joyner-boore'",
",",
"upper_eq_depth",
"=",
"None",
",",
"lower_eq_depth",
"=",
"None",
")",
":",
"if",
"selector",
".",
"catalogue",
".",
"get_number_events",
"(",
")",
"<",
"1",
":",
"raise",
"ValueError",
"(",
"'No events found in catalogue!'",
")",
"# rupture metric is selected and dip != 90 or 'rupture'",
"if",
"(",
"'rupture'",
"in",
"distance_metric",
")",
"and",
"(",
"fabs",
"(",
"self",
".",
"dip",
"-",
"90",
")",
">",
"1E-5",
")",
":",
"# Use rupture distance",
"self",
".",
"catalogue",
"=",
"selector",
".",
"within_rupture_distance",
"(",
"self",
".",
"geometry",
",",
"distance",
",",
"upper_depth",
"=",
"upper_eq_depth",
",",
"lower_depth",
"=",
"lower_eq_depth",
")",
"else",
":",
"# Use Joyner-Boore distance",
"self",
".",
"catalogue",
"=",
"selector",
".",
"within_joyner_boore_distance",
"(",
"self",
".",
"geometry",
",",
"distance",
",",
"upper_depth",
"=",
"upper_eq_depth",
",",
"lower_depth",
"=",
"lower_eq_depth",
")",
"if",
"self",
".",
"catalogue",
".",
"get_number_events",
"(",
")",
"<",
"5",
":",
"# Throw a warning regarding the small number of earthquakes in",
"# the source!",
"warnings",
".",
"warn",
"(",
"'Source %s (%s) has fewer than 5 events'",
"%",
"(",
"self",
".",
"id",
",",
"self",
".",
"name",
")",
")"
] | Selects earthquakes within a distance of the fault
:param selector:
Populated instance of :class:
`openquake.hmtk.seismicity.selector.CatalogueSelector`
:param distance:
Distance from point (km) for selection
:param str distance_metric
Choice of fault source distance metric 'joyner-boore' or 'rupture'
:param float upper_eq_depth:
Upper hypocentral depth of hypocentres to be selected
:param float lower_eq_depth:
Lower hypocentral depth of hypocentres to be selected | [
"Selects",
"earthquakes",
"within",
"a",
"distance",
"of",
"the",
"fault"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/sources/simple_fault_source.py#L191-L237 | train | 233,092 |
gem/oq-engine | openquake/hmtk/plotting/faults/geology_mfd_plot.py | plot_recurrence_models | def plot_recurrence_models(
configs, area, slip, msr, rake,
shear_modulus=30.0, disp_length_ratio=1.25E-5, msr_sigma=0.,
figure_size=(8, 6), filename=None, filetype='png', dpi=300, ax=None):
"""
Plots a set of recurrence models
:param list configs:
List of configuration dictionaries
"""
if ax is None:
fig, ax = plt.subplots(figsize=figure_size)
else:
fig = ax.get_figure()
for config in configs:
model = RecurrenceBranch(area, slip, msr, rake, shear_modulus,
disp_length_ratio, msr_sigma, weight=1.0)
model.get_recurrence(config)
occurrence = model.recurrence.occur_rates
cumulative = np.array([np.sum(occurrence[iloc:])
for iloc in range(0, len(occurrence))])
if 'AndersonLuco' in config['Model_Name']:
flt_label = config['Model_Name'] + ' - ' + config['Model_Type'] +\
' Type'
else:
flt_label = config['Model_Name']
flt_color = np.random.uniform(0.1, 1.0, 3)
ax.semilogy(model.magnitudes, cumulative, '-', label=flt_label,
color=flt_color, linewidth=2.)
ax.semilogy(model.magnitudes, model.recurrence.occur_rates, '--',
color=flt_color, linewidth=2.)
ax.set_xlabel('Magnitude')
ax.set_ylabel('Annual Rate')
ax.legend(bbox_to_anchor=(1.1, 1.0))
_save_image(fig, filename, filetype, dpi) | python | def plot_recurrence_models(
configs, area, slip, msr, rake,
shear_modulus=30.0, disp_length_ratio=1.25E-5, msr_sigma=0.,
figure_size=(8, 6), filename=None, filetype='png', dpi=300, ax=None):
"""
Plots a set of recurrence models
:param list configs:
List of configuration dictionaries
"""
if ax is None:
fig, ax = plt.subplots(figsize=figure_size)
else:
fig = ax.get_figure()
for config in configs:
model = RecurrenceBranch(area, slip, msr, rake, shear_modulus,
disp_length_ratio, msr_sigma, weight=1.0)
model.get_recurrence(config)
occurrence = model.recurrence.occur_rates
cumulative = np.array([np.sum(occurrence[iloc:])
for iloc in range(0, len(occurrence))])
if 'AndersonLuco' in config['Model_Name']:
flt_label = config['Model_Name'] + ' - ' + config['Model_Type'] +\
' Type'
else:
flt_label = config['Model_Name']
flt_color = np.random.uniform(0.1, 1.0, 3)
ax.semilogy(model.magnitudes, cumulative, '-', label=flt_label,
color=flt_color, linewidth=2.)
ax.semilogy(model.magnitudes, model.recurrence.occur_rates, '--',
color=flt_color, linewidth=2.)
ax.set_xlabel('Magnitude')
ax.set_ylabel('Annual Rate')
ax.legend(bbox_to_anchor=(1.1, 1.0))
_save_image(fig, filename, filetype, dpi) | [
"def",
"plot_recurrence_models",
"(",
"configs",
",",
"area",
",",
"slip",
",",
"msr",
",",
"rake",
",",
"shear_modulus",
"=",
"30.0",
",",
"disp_length_ratio",
"=",
"1.25E-5",
",",
"msr_sigma",
"=",
"0.",
",",
"figure_size",
"=",
"(",
"8",
",",
"6",
")",
",",
"filename",
"=",
"None",
",",
"filetype",
"=",
"'png'",
",",
"dpi",
"=",
"300",
",",
"ax",
"=",
"None",
")",
":",
"if",
"ax",
"is",
"None",
":",
"fig",
",",
"ax",
"=",
"plt",
".",
"subplots",
"(",
"figsize",
"=",
"figure_size",
")",
"else",
":",
"fig",
"=",
"ax",
".",
"get_figure",
"(",
")",
"for",
"config",
"in",
"configs",
":",
"model",
"=",
"RecurrenceBranch",
"(",
"area",
",",
"slip",
",",
"msr",
",",
"rake",
",",
"shear_modulus",
",",
"disp_length_ratio",
",",
"msr_sigma",
",",
"weight",
"=",
"1.0",
")",
"model",
".",
"get_recurrence",
"(",
"config",
")",
"occurrence",
"=",
"model",
".",
"recurrence",
".",
"occur_rates",
"cumulative",
"=",
"np",
".",
"array",
"(",
"[",
"np",
".",
"sum",
"(",
"occurrence",
"[",
"iloc",
":",
"]",
")",
"for",
"iloc",
"in",
"range",
"(",
"0",
",",
"len",
"(",
"occurrence",
")",
")",
"]",
")",
"if",
"'AndersonLuco'",
"in",
"config",
"[",
"'Model_Name'",
"]",
":",
"flt_label",
"=",
"config",
"[",
"'Model_Name'",
"]",
"+",
"' - '",
"+",
"config",
"[",
"'Model_Type'",
"]",
"+",
"' Type'",
"else",
":",
"flt_label",
"=",
"config",
"[",
"'Model_Name'",
"]",
"flt_color",
"=",
"np",
".",
"random",
".",
"uniform",
"(",
"0.1",
",",
"1.0",
",",
"3",
")",
"ax",
".",
"semilogy",
"(",
"model",
".",
"magnitudes",
",",
"cumulative",
",",
"'-'",
",",
"label",
"=",
"flt_label",
",",
"color",
"=",
"flt_color",
",",
"linewidth",
"=",
"2.",
")",
"ax",
".",
"semilogy",
"(",
"model",
".",
"magnitudes",
",",
"model",
".",
"recurrence",
".",
"occur_rates",
",",
"'--'",
",",
"color",
"=",
"flt_color",
",",
"linewidth",
"=",
"2.",
")",
"ax",
".",
"set_xlabel",
"(",
"'Magnitude'",
")",
"ax",
".",
"set_ylabel",
"(",
"'Annual Rate'",
")",
"ax",
".",
"legend",
"(",
"bbox_to_anchor",
"=",
"(",
"1.1",
",",
"1.0",
")",
")",
"_save_image",
"(",
"fig",
",",
"filename",
",",
"filetype",
",",
"dpi",
")"
] | Plots a set of recurrence models
:param list configs:
List of configuration dictionaries | [
"Plots",
"a",
"set",
"of",
"recurrence",
"models"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/plotting/faults/geology_mfd_plot.py#L69-L105 | train | 233,093 |
gem/oq-engine | openquake/hazardlib/sourcewriter.py | build_area_source_geometry | def build_area_source_geometry(area_source):
"""
Returns the area source geometry as a Node
:param area_source:
Area source model as an instance of the :class:
`openquake.hazardlib.source.area.AreaSource`
:returns:
Instance of :class:`openquake.baselib.node.Node`
"""
geom = []
for lon_lat in zip(area_source.polygon.lons, area_source.polygon.lats):
geom.extend(lon_lat)
poslist_node = Node("gml:posList", text=geom)
linear_ring_node = Node("gml:LinearRing", nodes=[poslist_node])
exterior_node = Node("gml:exterior", nodes=[linear_ring_node])
polygon_node = Node("gml:Polygon", nodes=[exterior_node])
upper_depth_node = Node(
"upperSeismoDepth", text=area_source.upper_seismogenic_depth)
lower_depth_node = Node(
"lowerSeismoDepth", text=area_source.lower_seismogenic_depth)
return Node(
"areaGeometry", {'discretization': area_source.area_discretization},
nodes=[polygon_node, upper_depth_node, lower_depth_node]) | python | def build_area_source_geometry(area_source):
"""
Returns the area source geometry as a Node
:param area_source:
Area source model as an instance of the :class:
`openquake.hazardlib.source.area.AreaSource`
:returns:
Instance of :class:`openquake.baselib.node.Node`
"""
geom = []
for lon_lat in zip(area_source.polygon.lons, area_source.polygon.lats):
geom.extend(lon_lat)
poslist_node = Node("gml:posList", text=geom)
linear_ring_node = Node("gml:LinearRing", nodes=[poslist_node])
exterior_node = Node("gml:exterior", nodes=[linear_ring_node])
polygon_node = Node("gml:Polygon", nodes=[exterior_node])
upper_depth_node = Node(
"upperSeismoDepth", text=area_source.upper_seismogenic_depth)
lower_depth_node = Node(
"lowerSeismoDepth", text=area_source.lower_seismogenic_depth)
return Node(
"areaGeometry", {'discretization': area_source.area_discretization},
nodes=[polygon_node, upper_depth_node, lower_depth_node]) | [
"def",
"build_area_source_geometry",
"(",
"area_source",
")",
":",
"geom",
"=",
"[",
"]",
"for",
"lon_lat",
"in",
"zip",
"(",
"area_source",
".",
"polygon",
".",
"lons",
",",
"area_source",
".",
"polygon",
".",
"lats",
")",
":",
"geom",
".",
"extend",
"(",
"lon_lat",
")",
"poslist_node",
"=",
"Node",
"(",
"\"gml:posList\"",
",",
"text",
"=",
"geom",
")",
"linear_ring_node",
"=",
"Node",
"(",
"\"gml:LinearRing\"",
",",
"nodes",
"=",
"[",
"poslist_node",
"]",
")",
"exterior_node",
"=",
"Node",
"(",
"\"gml:exterior\"",
",",
"nodes",
"=",
"[",
"linear_ring_node",
"]",
")",
"polygon_node",
"=",
"Node",
"(",
"\"gml:Polygon\"",
",",
"nodes",
"=",
"[",
"exterior_node",
"]",
")",
"upper_depth_node",
"=",
"Node",
"(",
"\"upperSeismoDepth\"",
",",
"text",
"=",
"area_source",
".",
"upper_seismogenic_depth",
")",
"lower_depth_node",
"=",
"Node",
"(",
"\"lowerSeismoDepth\"",
",",
"text",
"=",
"area_source",
".",
"lower_seismogenic_depth",
")",
"return",
"Node",
"(",
"\"areaGeometry\"",
",",
"{",
"'discretization'",
":",
"area_source",
".",
"area_discretization",
"}",
",",
"nodes",
"=",
"[",
"polygon_node",
",",
"upper_depth_node",
",",
"lower_depth_node",
"]",
")"
] | Returns the area source geometry as a Node
:param area_source:
Area source model as an instance of the :class:
`openquake.hazardlib.source.area.AreaSource`
:returns:
Instance of :class:`openquake.baselib.node.Node` | [
"Returns",
"the",
"area",
"source",
"geometry",
"as",
"a",
"Node"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/sourcewriter.py#L35-L58 | train | 233,094 |
gem/oq-engine | openquake/hazardlib/sourcewriter.py | build_point_source_geometry | def build_point_source_geometry(point_source):
"""
Returns the poing source geometry as a Node
:param point_source:
Point source model as an instance of the :class:
`openquake.hazardlib.source.point.PointSource`
:returns:
Instance of :class:`openquake.baselib.node.Node`
"""
xy = point_source.location.x, point_source.location.y
pos_node = Node("gml:pos", text=xy)
point_node = Node("gml:Point", nodes=[pos_node])
upper_depth_node = Node(
"upperSeismoDepth", text=point_source.upper_seismogenic_depth)
lower_depth_node = Node(
"lowerSeismoDepth", text=point_source.lower_seismogenic_depth)
return Node(
"pointGeometry",
nodes=[point_node, upper_depth_node, lower_depth_node]) | python | def build_point_source_geometry(point_source):
"""
Returns the poing source geometry as a Node
:param point_source:
Point source model as an instance of the :class:
`openquake.hazardlib.source.point.PointSource`
:returns:
Instance of :class:`openquake.baselib.node.Node`
"""
xy = point_source.location.x, point_source.location.y
pos_node = Node("gml:pos", text=xy)
point_node = Node("gml:Point", nodes=[pos_node])
upper_depth_node = Node(
"upperSeismoDepth", text=point_source.upper_seismogenic_depth)
lower_depth_node = Node(
"lowerSeismoDepth", text=point_source.lower_seismogenic_depth)
return Node(
"pointGeometry",
nodes=[point_node, upper_depth_node, lower_depth_node]) | [
"def",
"build_point_source_geometry",
"(",
"point_source",
")",
":",
"xy",
"=",
"point_source",
".",
"location",
".",
"x",
",",
"point_source",
".",
"location",
".",
"y",
"pos_node",
"=",
"Node",
"(",
"\"gml:pos\"",
",",
"text",
"=",
"xy",
")",
"point_node",
"=",
"Node",
"(",
"\"gml:Point\"",
",",
"nodes",
"=",
"[",
"pos_node",
"]",
")",
"upper_depth_node",
"=",
"Node",
"(",
"\"upperSeismoDepth\"",
",",
"text",
"=",
"point_source",
".",
"upper_seismogenic_depth",
")",
"lower_depth_node",
"=",
"Node",
"(",
"\"lowerSeismoDepth\"",
",",
"text",
"=",
"point_source",
".",
"lower_seismogenic_depth",
")",
"return",
"Node",
"(",
"\"pointGeometry\"",
",",
"nodes",
"=",
"[",
"point_node",
",",
"upper_depth_node",
",",
"lower_depth_node",
"]",
")"
] | Returns the poing source geometry as a Node
:param point_source:
Point source model as an instance of the :class:
`openquake.hazardlib.source.point.PointSource`
:returns:
Instance of :class:`openquake.baselib.node.Node` | [
"Returns",
"the",
"poing",
"source",
"geometry",
"as",
"a",
"Node"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/sourcewriter.py#L61-L80 | train | 233,095 |
gem/oq-engine | openquake/hazardlib/sourcewriter.py | build_linestring_node | def build_linestring_node(line, with_depth=False):
"""
Parses a line to a Node class
:param line:
Line as instance of :class:`openquake.hazardlib.geo.line.Line`
:param bool with_depth:
Include the depth values (True) or not (False):
:returns:
Instance of :class:`openquake.baselib.node.Node`
"""
geom = []
for p in line.points:
if with_depth:
geom.extend((p.x, p.y, p.z))
else:
geom.extend((p.x, p.y))
poslist_node = Node("gml:posList", text=geom)
return Node("gml:LineString", nodes=[poslist_node]) | python | def build_linestring_node(line, with_depth=False):
"""
Parses a line to a Node class
:param line:
Line as instance of :class:`openquake.hazardlib.geo.line.Line`
:param bool with_depth:
Include the depth values (True) or not (False):
:returns:
Instance of :class:`openquake.baselib.node.Node`
"""
geom = []
for p in line.points:
if with_depth:
geom.extend((p.x, p.y, p.z))
else:
geom.extend((p.x, p.y))
poslist_node = Node("gml:posList", text=geom)
return Node("gml:LineString", nodes=[poslist_node]) | [
"def",
"build_linestring_node",
"(",
"line",
",",
"with_depth",
"=",
"False",
")",
":",
"geom",
"=",
"[",
"]",
"for",
"p",
"in",
"line",
".",
"points",
":",
"if",
"with_depth",
":",
"geom",
".",
"extend",
"(",
"(",
"p",
".",
"x",
",",
"p",
".",
"y",
",",
"p",
".",
"z",
")",
")",
"else",
":",
"geom",
".",
"extend",
"(",
"(",
"p",
".",
"x",
",",
"p",
".",
"y",
")",
")",
"poslist_node",
"=",
"Node",
"(",
"\"gml:posList\"",
",",
"text",
"=",
"geom",
")",
"return",
"Node",
"(",
"\"gml:LineString\"",
",",
"nodes",
"=",
"[",
"poslist_node",
"]",
")"
] | Parses a line to a Node class
:param line:
Line as instance of :class:`openquake.hazardlib.geo.line.Line`
:param bool with_depth:
Include the depth values (True) or not (False):
:returns:
Instance of :class:`openquake.baselib.node.Node` | [
"Parses",
"a",
"line",
"to",
"a",
"Node",
"class"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/sourcewriter.py#L83-L101 | train | 233,096 |
gem/oq-engine | openquake/hazardlib/sourcewriter.py | build_simple_fault_geometry | def build_simple_fault_geometry(fault_source):
"""
Returns the simple fault source geometry as a Node
:param fault_source:
Simple fault source model as an instance of the :class:
`openquake.hazardlib.source.simple_fault.SimpleFaultSource`
:returns:
Instance of :class:`openquake.baselib.node.Node`
"""
linestring_node = build_linestring_node(fault_source.fault_trace,
with_depth=False)
dip_node = Node("dip", text=fault_source.dip)
upper_depth_node = Node(
"upperSeismoDepth", text=fault_source.upper_seismogenic_depth)
lower_depth_node = Node(
"lowerSeismoDepth", text=fault_source.lower_seismogenic_depth)
return Node("simpleFaultGeometry",
nodes=[linestring_node, dip_node, upper_depth_node,
lower_depth_node]) | python | def build_simple_fault_geometry(fault_source):
"""
Returns the simple fault source geometry as a Node
:param fault_source:
Simple fault source model as an instance of the :class:
`openquake.hazardlib.source.simple_fault.SimpleFaultSource`
:returns:
Instance of :class:`openquake.baselib.node.Node`
"""
linestring_node = build_linestring_node(fault_source.fault_trace,
with_depth=False)
dip_node = Node("dip", text=fault_source.dip)
upper_depth_node = Node(
"upperSeismoDepth", text=fault_source.upper_seismogenic_depth)
lower_depth_node = Node(
"lowerSeismoDepth", text=fault_source.lower_seismogenic_depth)
return Node("simpleFaultGeometry",
nodes=[linestring_node, dip_node, upper_depth_node,
lower_depth_node]) | [
"def",
"build_simple_fault_geometry",
"(",
"fault_source",
")",
":",
"linestring_node",
"=",
"build_linestring_node",
"(",
"fault_source",
".",
"fault_trace",
",",
"with_depth",
"=",
"False",
")",
"dip_node",
"=",
"Node",
"(",
"\"dip\"",
",",
"text",
"=",
"fault_source",
".",
"dip",
")",
"upper_depth_node",
"=",
"Node",
"(",
"\"upperSeismoDepth\"",
",",
"text",
"=",
"fault_source",
".",
"upper_seismogenic_depth",
")",
"lower_depth_node",
"=",
"Node",
"(",
"\"lowerSeismoDepth\"",
",",
"text",
"=",
"fault_source",
".",
"lower_seismogenic_depth",
")",
"return",
"Node",
"(",
"\"simpleFaultGeometry\"",
",",
"nodes",
"=",
"[",
"linestring_node",
",",
"dip_node",
",",
"upper_depth_node",
",",
"lower_depth_node",
"]",
")"
] | Returns the simple fault source geometry as a Node
:param fault_source:
Simple fault source model as an instance of the :class:
`openquake.hazardlib.source.simple_fault.SimpleFaultSource`
:returns:
Instance of :class:`openquake.baselib.node.Node` | [
"Returns",
"the",
"simple",
"fault",
"source",
"geometry",
"as",
"a",
"Node"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/sourcewriter.py#L104-L123 | train | 233,097 |
gem/oq-engine | openquake/hazardlib/sourcewriter.py | build_complex_fault_geometry | def build_complex_fault_geometry(fault_source):
"""
Returns the complex fault source geometry as a Node
:param fault_source:
Complex fault source model as an instance of the :class:
`openquake.hazardlib.source.complex_fault.ComplexFaultSource`
:returns:
Instance of :class:`openquake.baselib.node.Node`
"""
num_edges = len(fault_source.edges)
edge_nodes = []
for iloc, edge in enumerate(fault_source.edges):
if iloc == 0:
# Top Edge
node_name = "faultTopEdge"
elif iloc == (num_edges - 1):
# Bottom edge
node_name = "faultBottomEdge"
else:
# Intermediate edge
node_name = "intermediateEdge"
edge_nodes.append(
Node(node_name,
nodes=[build_linestring_node(edge, with_depth=True)]))
return Node("complexFaultGeometry", nodes=edge_nodes) | python | def build_complex_fault_geometry(fault_source):
"""
Returns the complex fault source geometry as a Node
:param fault_source:
Complex fault source model as an instance of the :class:
`openquake.hazardlib.source.complex_fault.ComplexFaultSource`
:returns:
Instance of :class:`openquake.baselib.node.Node`
"""
num_edges = len(fault_source.edges)
edge_nodes = []
for iloc, edge in enumerate(fault_source.edges):
if iloc == 0:
# Top Edge
node_name = "faultTopEdge"
elif iloc == (num_edges - 1):
# Bottom edge
node_name = "faultBottomEdge"
else:
# Intermediate edge
node_name = "intermediateEdge"
edge_nodes.append(
Node(node_name,
nodes=[build_linestring_node(edge, with_depth=True)]))
return Node("complexFaultGeometry", nodes=edge_nodes) | [
"def",
"build_complex_fault_geometry",
"(",
"fault_source",
")",
":",
"num_edges",
"=",
"len",
"(",
"fault_source",
".",
"edges",
")",
"edge_nodes",
"=",
"[",
"]",
"for",
"iloc",
",",
"edge",
"in",
"enumerate",
"(",
"fault_source",
".",
"edges",
")",
":",
"if",
"iloc",
"==",
"0",
":",
"# Top Edge",
"node_name",
"=",
"\"faultTopEdge\"",
"elif",
"iloc",
"==",
"(",
"num_edges",
"-",
"1",
")",
":",
"# Bottom edge",
"node_name",
"=",
"\"faultBottomEdge\"",
"else",
":",
"# Intermediate edge",
"node_name",
"=",
"\"intermediateEdge\"",
"edge_nodes",
".",
"append",
"(",
"Node",
"(",
"node_name",
",",
"nodes",
"=",
"[",
"build_linestring_node",
"(",
"edge",
",",
"with_depth",
"=",
"True",
")",
"]",
")",
")",
"return",
"Node",
"(",
"\"complexFaultGeometry\"",
",",
"nodes",
"=",
"edge_nodes",
")"
] | Returns the complex fault source geometry as a Node
:param fault_source:
Complex fault source model as an instance of the :class:
`openquake.hazardlib.source.complex_fault.ComplexFaultSource`
:returns:
Instance of :class:`openquake.baselib.node.Node` | [
"Returns",
"the",
"complex",
"fault",
"source",
"geometry",
"as",
"a",
"Node"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/sourcewriter.py#L126-L152 | train | 233,098 |
gem/oq-engine | openquake/hazardlib/sourcewriter.py | build_evenly_discretised_mfd | def build_evenly_discretised_mfd(mfd):
"""
Returns the evenly discretized MFD as a Node
:param mfd:
MFD as instance of :class:
`openquake.hazardlib.mfd.evenly_discretized.EvenlyDiscretizedMFD`
:returns:
Instance of :class:`openquake.baselib.node.Node`
"""
occur_rates = Node("occurRates", text=mfd.occurrence_rates)
return Node("incrementalMFD",
{"binWidth": mfd.bin_width, "minMag": mfd.min_mag},
nodes=[occur_rates]) | python | def build_evenly_discretised_mfd(mfd):
"""
Returns the evenly discretized MFD as a Node
:param mfd:
MFD as instance of :class:
`openquake.hazardlib.mfd.evenly_discretized.EvenlyDiscretizedMFD`
:returns:
Instance of :class:`openquake.baselib.node.Node`
"""
occur_rates = Node("occurRates", text=mfd.occurrence_rates)
return Node("incrementalMFD",
{"binWidth": mfd.bin_width, "minMag": mfd.min_mag},
nodes=[occur_rates]) | [
"def",
"build_evenly_discretised_mfd",
"(",
"mfd",
")",
":",
"occur_rates",
"=",
"Node",
"(",
"\"occurRates\"",
",",
"text",
"=",
"mfd",
".",
"occurrence_rates",
")",
"return",
"Node",
"(",
"\"incrementalMFD\"",
",",
"{",
"\"binWidth\"",
":",
"mfd",
".",
"bin_width",
",",
"\"minMag\"",
":",
"mfd",
".",
"min_mag",
"}",
",",
"nodes",
"=",
"[",
"occur_rates",
"]",
")"
] | Returns the evenly discretized MFD as a Node
:param mfd:
MFD as instance of :class:
`openquake.hazardlib.mfd.evenly_discretized.EvenlyDiscretizedMFD`
:returns:
Instance of :class:`openquake.baselib.node.Node` | [
"Returns",
"the",
"evenly",
"discretized",
"MFD",
"as",
"a",
"Node"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/sourcewriter.py#L156-L169 | train | 233,099 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.