repo stringlengths 7 55 | path stringlengths 4 127 | func_name stringlengths 1 88 | original_string stringlengths 75 19.8k | language stringclasses 1
value | code stringlengths 75 19.8k | code_tokens listlengths 20 707 | docstring stringlengths 3 17.3k | docstring_tokens listlengths 3 222 | sha stringlengths 40 40 | url stringlengths 87 242 | partition stringclasses 1
value | idx int64 0 252k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
gem/oq-engine | openquake/hazardlib/gsim/abrahamson_silva_2008.py | AbrahamsonSilva2008._get_stddevs | def _get_stddevs(self, C, C_PGA, pga1100, rup, sites, stddev_types):
"""
Return standard deviations as described in paragraph 'Equations for
standard deviation', page 81.
"""
std_intra = self._compute_intra_event_std(C, C_PGA, pga1100, rup.mag,
sites.vs30,
sites.vs30measured)
std_inter = self._compute_inter_event_std(C, C_PGA, pga1100, rup.mag,
sites.vs30)
stddevs = []
for stddev_type in stddev_types:
assert stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES
if stddev_type == const.StdDev.TOTAL:
stddevs.append(np.sqrt(std_intra ** 2 + std_inter ** 2))
elif stddev_type == const.StdDev.INTRA_EVENT:
stddevs.append(std_intra)
elif stddev_type == const.StdDev.INTER_EVENT:
stddevs.append(std_inter)
return stddevs | python | def _get_stddevs(self, C, C_PGA, pga1100, rup, sites, stddev_types):
"""
Return standard deviations as described in paragraph 'Equations for
standard deviation', page 81.
"""
std_intra = self._compute_intra_event_std(C, C_PGA, pga1100, rup.mag,
sites.vs30,
sites.vs30measured)
std_inter = self._compute_inter_event_std(C, C_PGA, pga1100, rup.mag,
sites.vs30)
stddevs = []
for stddev_type in stddev_types:
assert stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES
if stddev_type == const.StdDev.TOTAL:
stddevs.append(np.sqrt(std_intra ** 2 + std_inter ** 2))
elif stddev_type == const.StdDev.INTRA_EVENT:
stddevs.append(std_intra)
elif stddev_type == const.StdDev.INTER_EVENT:
stddevs.append(std_inter)
return stddevs | [
"def",
"_get_stddevs",
"(",
"self",
",",
"C",
",",
"C_PGA",
",",
"pga1100",
",",
"rup",
",",
"sites",
",",
"stddev_types",
")",
":",
"std_intra",
"=",
"self",
".",
"_compute_intra_event_std",
"(",
"C",
",",
"C_PGA",
",",
"pga1100",
",",
"rup",
".",
"ma... | Return standard deviations as described in paragraph 'Equations for
standard deviation', page 81. | [
"Return",
"standard",
"deviations",
"as",
"described",
"in",
"paragraph",
"Equations",
"for",
"standard",
"deviation",
"page",
"81",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/abrahamson_silva_2008.py#L277-L296 | train | 214,100 |
gem/oq-engine | openquake/hazardlib/gsim/abrahamson_silva_2008.py | AbrahamsonSilva2008._compute_inter_event_std | def _compute_inter_event_std(self, C, C_PGA, pga1100, mag, vs30):
"""
Compute inter event standard deviation, equation 25, page 82.
"""
tau_0 = self._compute_std_0(C['s3'], C['s4'], mag)
tau_b_pga = self._compute_std_0(C_PGA['s3'], C_PGA['s4'], mag)
delta_amp = self._compute_partial_derivative_site_amp(C, pga1100, vs30)
std_inter = np.sqrt(tau_0 ** 2 + (delta_amp ** 2) * (tau_b_pga ** 2) +
2 * delta_amp * tau_0 * tau_b_pga * C['rho'])
return std_inter | python | def _compute_inter_event_std(self, C, C_PGA, pga1100, mag, vs30):
"""
Compute inter event standard deviation, equation 25, page 82.
"""
tau_0 = self._compute_std_0(C['s3'], C['s4'], mag)
tau_b_pga = self._compute_std_0(C_PGA['s3'], C_PGA['s4'], mag)
delta_amp = self._compute_partial_derivative_site_amp(C, pga1100, vs30)
std_inter = np.sqrt(tau_0 ** 2 + (delta_amp ** 2) * (tau_b_pga ** 2) +
2 * delta_amp * tau_0 * tau_b_pga * C['rho'])
return std_inter | [
"def",
"_compute_inter_event_std",
"(",
"self",
",",
"C",
",",
"C_PGA",
",",
"pga1100",
",",
"mag",
",",
"vs30",
")",
":",
"tau_0",
"=",
"self",
".",
"_compute_std_0",
"(",
"C",
"[",
"'s3'",
"]",
",",
"C",
"[",
"'s4'",
"]",
",",
"mag",
")",
"tau_b_... | Compute inter event standard deviation, equation 25, page 82. | [
"Compute",
"inter",
"event",
"standard",
"deviation",
"equation",
"25",
"page",
"82",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/abrahamson_silva_2008.py#L314-L325 | train | 214,101 |
gem/oq-engine | openquake/hazardlib/gsim/abrahamson_silva_2008.py | AbrahamsonSilva2008._compute_sigma_b | def _compute_sigma_b(self, C, mag, vs30measured):
"""
Equation 23, page 81.
"""
sigma_0 = self._compute_sigma_0(C, mag, vs30measured)
sigma_amp = self.CONSTS['sigma_amp']
return np.sqrt(sigma_0 ** 2 - sigma_amp ** 2) | python | def _compute_sigma_b(self, C, mag, vs30measured):
"""
Equation 23, page 81.
"""
sigma_0 = self._compute_sigma_0(C, mag, vs30measured)
sigma_amp = self.CONSTS['sigma_amp']
return np.sqrt(sigma_0 ** 2 - sigma_amp ** 2) | [
"def",
"_compute_sigma_b",
"(",
"self",
",",
"C",
",",
"mag",
",",
"vs30measured",
")",
":",
"sigma_0",
"=",
"self",
".",
"_compute_sigma_0",
"(",
"C",
",",
"mag",
",",
"vs30measured",
")",
"sigma_amp",
"=",
"self",
".",
"CONSTS",
"[",
"'sigma_amp'",
"]"... | Equation 23, page 81. | [
"Equation",
"23",
"page",
"81",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/abrahamson_silva_2008.py#L327-L334 | train | 214,102 |
gem/oq-engine | openquake/hazardlib/gsim/abrahamson_silva_2008.py | AbrahamsonSilva2008._compute_sigma_0 | def _compute_sigma_0(self, C, mag, vs30measured):
"""
Equation 27, page 82.
"""
s1 = np.zeros_like(vs30measured, dtype=float)
s2 = np.zeros_like(vs30measured, dtype=float)
idx = vs30measured == 1
s1[idx] = C['s1mea']
s2[idx] = C['s2mea']
idx = vs30measured == 0
s1[idx] = C['s1est']
s2[idx] = C['s2est']
return self._compute_std_0(s1, s2, mag) | python | def _compute_sigma_0(self, C, mag, vs30measured):
"""
Equation 27, page 82.
"""
s1 = np.zeros_like(vs30measured, dtype=float)
s2 = np.zeros_like(vs30measured, dtype=float)
idx = vs30measured == 1
s1[idx] = C['s1mea']
s2[idx] = C['s2mea']
idx = vs30measured == 0
s1[idx] = C['s1est']
s2[idx] = C['s2est']
return self._compute_std_0(s1, s2, mag) | [
"def",
"_compute_sigma_0",
"(",
"self",
",",
"C",
",",
"mag",
",",
"vs30measured",
")",
":",
"s1",
"=",
"np",
".",
"zeros_like",
"(",
"vs30measured",
",",
"dtype",
"=",
"float",
")",
"s2",
"=",
"np",
".",
"zeros_like",
"(",
"vs30measured",
",",
"dtype"... | Equation 27, page 82. | [
"Equation",
"27",
"page",
"82",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/abrahamson_silva_2008.py#L336-L351 | train | 214,103 |
gem/oq-engine | openquake/hazardlib/gsim/abrahamson_silva_2008.py | AbrahamsonSilva2008._compute_std_0 | def _compute_std_0(self, c1, c2, mag):
"""
Common part of equations 27 and 28, pag 82.
"""
if mag < 5:
return c1
elif mag >= 5 and mag <= 7:
return c1 + (c2 - c1) * (mag - 5) / 2
else:
return c2 | python | def _compute_std_0(self, c1, c2, mag):
"""
Common part of equations 27 and 28, pag 82.
"""
if mag < 5:
return c1
elif mag >= 5 and mag <= 7:
return c1 + (c2 - c1) * (mag - 5) / 2
else:
return c2 | [
"def",
"_compute_std_0",
"(",
"self",
",",
"c1",
",",
"c2",
",",
"mag",
")",
":",
"if",
"mag",
"<",
"5",
":",
"return",
"c1",
"elif",
"mag",
">=",
"5",
"and",
"mag",
"<=",
"7",
":",
"return",
"c1",
"+",
"(",
"c2",
"-",
"c1",
")",
"*",
"(",
... | Common part of equations 27 and 28, pag 82. | [
"Common",
"part",
"of",
"equations",
"27",
"and",
"28",
"pag",
"82",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/abrahamson_silva_2008.py#L353-L362 | train | 214,104 |
gem/oq-engine | openquake/hazardlib/gsim/abrahamson_silva_2008.py | AbrahamsonSilva2008._compute_a21_factor | def _compute_a21_factor(self, C, imt, z1pt0, vs30):
"""
Compute and return a21 factor, equation 18, page 80.
"""
e2 = self._compute_e2_factor(imt, vs30)
a21 = e2.copy()
vs30_star, v1 = self._compute_vs30_star_factor(imt, vs30)
median_z1pt0 = self._compute_median_z1pt0(vs30)
numerator = ((C['a10'] + C['b'] * self.CONSTS['n']) *
np.log(vs30_star / np.min([v1, 1000])))
denominator = np.log((z1pt0 + self.CONSTS['c2']) /
(median_z1pt0 + self.CONSTS['c2']))
idx = numerator + e2 * denominator < 0
a21[idx] = - numerator[idx] / denominator[idx]
idx = vs30 >= 1000
a21[idx] = 0.0
return a21 | python | def _compute_a21_factor(self, C, imt, z1pt0, vs30):
"""
Compute and return a21 factor, equation 18, page 80.
"""
e2 = self._compute_e2_factor(imt, vs30)
a21 = e2.copy()
vs30_star, v1 = self._compute_vs30_star_factor(imt, vs30)
median_z1pt0 = self._compute_median_z1pt0(vs30)
numerator = ((C['a10'] + C['b'] * self.CONSTS['n']) *
np.log(vs30_star / np.min([v1, 1000])))
denominator = np.log((z1pt0 + self.CONSTS['c2']) /
(median_z1pt0 + self.CONSTS['c2']))
idx = numerator + e2 * denominator < 0
a21[idx] = - numerator[idx] / denominator[idx]
idx = vs30 >= 1000
a21[idx] = 0.0
return a21 | [
"def",
"_compute_a21_factor",
"(",
"self",
",",
"C",
",",
"imt",
",",
"z1pt0",
",",
"vs30",
")",
":",
"e2",
"=",
"self",
".",
"_compute_e2_factor",
"(",
"imt",
",",
"vs30",
")",
"a21",
"=",
"e2",
".",
"copy",
"(",
")",
"vs30_star",
",",
"v1",
"=",
... | Compute and return a21 factor, equation 18, page 80. | [
"Compute",
"and",
"return",
"a21",
"factor",
"equation",
"18",
"page",
"80",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/abrahamson_silva_2008.py#L383-L404 | train | 214,105 |
gem/oq-engine | openquake/hazardlib/gsim/abrahamson_silva_2008.py | AbrahamsonSilva2008._compute_vs30_star_factor | def _compute_vs30_star_factor(self, imt, vs30):
"""
Compute and return vs30 star factor, equation 5, page 77.
"""
v1 = self._compute_v1_factor(imt)
vs30_star = vs30.copy()
vs30_star[vs30_star >= v1] = v1
return vs30_star, v1 | python | def _compute_vs30_star_factor(self, imt, vs30):
"""
Compute and return vs30 star factor, equation 5, page 77.
"""
v1 = self._compute_v1_factor(imt)
vs30_star = vs30.copy()
vs30_star[vs30_star >= v1] = v1
return vs30_star, v1 | [
"def",
"_compute_vs30_star_factor",
"(",
"self",
",",
"imt",
",",
"vs30",
")",
":",
"v1",
"=",
"self",
".",
"_compute_v1_factor",
"(",
"imt",
")",
"vs30_star",
"=",
"vs30",
".",
"copy",
"(",
")",
"vs30_star",
"[",
"vs30_star",
">=",
"v1",
"]",
"=",
"v1... | Compute and return vs30 star factor, equation 5, page 77. | [
"Compute",
"and",
"return",
"vs30",
"star",
"factor",
"equation",
"5",
"page",
"77",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/abrahamson_silva_2008.py#L406-L414 | train | 214,106 |
gem/oq-engine | openquake/hazardlib/gsim/abrahamson_silva_2008.py | AbrahamsonSilva2008._compute_v1_factor | def _compute_v1_factor(self, imt):
"""
Compute and return v1 factor, equation 6, page 77.
"""
if imt.name == "SA":
t = imt.period
if t <= 0.50:
v1 = 1500.0
elif t > 0.50 and t <= 1.0:
v1 = np.exp(8.0 - 0.795 * np.log(t / 0.21))
elif t > 1.0 and t < 2.0:
v1 = np.exp(6.76 - 0.297 * np.log(t))
else:
v1 = 700.0
elif imt.name == "PGA":
v1 = 1500.0
else:
# this is for PGV
v1 = 862.0
return v1 | python | def _compute_v1_factor(self, imt):
"""
Compute and return v1 factor, equation 6, page 77.
"""
if imt.name == "SA":
t = imt.period
if t <= 0.50:
v1 = 1500.0
elif t > 0.50 and t <= 1.0:
v1 = np.exp(8.0 - 0.795 * np.log(t / 0.21))
elif t > 1.0 and t < 2.0:
v1 = np.exp(6.76 - 0.297 * np.log(t))
else:
v1 = 700.0
elif imt.name == "PGA":
v1 = 1500.0
else:
# this is for PGV
v1 = 862.0
return v1 | [
"def",
"_compute_v1_factor",
"(",
"self",
",",
"imt",
")",
":",
"if",
"imt",
".",
"name",
"==",
"\"SA\"",
":",
"t",
"=",
"imt",
".",
"period",
"if",
"t",
"<=",
"0.50",
":",
"v1",
"=",
"1500.0",
"elif",
"t",
">",
"0.50",
"and",
"t",
"<=",
"1.0",
... | Compute and return v1 factor, equation 6, page 77. | [
"Compute",
"and",
"return",
"v1",
"factor",
"equation",
"6",
"page",
"77",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/abrahamson_silva_2008.py#L416-L436 | train | 214,107 |
gem/oq-engine | openquake/hazardlib/gsim/abrahamson_silva_2008.py | AbrahamsonSilva2008._compute_e2_factor | def _compute_e2_factor(self, imt, vs30):
"""
Compute and return e2 factor, equation 19, page 80.
"""
e2 = np.zeros_like(vs30)
if imt.name == "PGV":
period = 1
elif imt.name == "PGA":
period = 0
else:
period = imt.period
if period < 0.35:
return e2
else:
idx = vs30 <= 1000
if period >= 0.35 and period <= 2.0:
e2[idx] = (-0.25 * np.log(vs30[idx] / 1000) *
np.log(period / 0.35))
elif period > 2.0:
e2[idx] = (-0.25 * np.log(vs30[idx] / 1000) *
np.log(2.0 / 0.35))
return e2 | python | def _compute_e2_factor(self, imt, vs30):
"""
Compute and return e2 factor, equation 19, page 80.
"""
e2 = np.zeros_like(vs30)
if imt.name == "PGV":
period = 1
elif imt.name == "PGA":
period = 0
else:
period = imt.period
if period < 0.35:
return e2
else:
idx = vs30 <= 1000
if period >= 0.35 and period <= 2.0:
e2[idx] = (-0.25 * np.log(vs30[idx] / 1000) *
np.log(period / 0.35))
elif period > 2.0:
e2[idx] = (-0.25 * np.log(vs30[idx] / 1000) *
np.log(2.0 / 0.35))
return e2 | [
"def",
"_compute_e2_factor",
"(",
"self",
",",
"imt",
",",
"vs30",
")",
":",
"e2",
"=",
"np",
".",
"zeros_like",
"(",
"vs30",
")",
"if",
"imt",
".",
"name",
"==",
"\"PGV\"",
":",
"period",
"=",
"1",
"elif",
"imt",
".",
"name",
"==",
"\"PGA\"",
":",... | Compute and return e2 factor, equation 19, page 80. | [
"Compute",
"and",
"return",
"e2",
"factor",
"equation",
"19",
"page",
"80",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/abrahamson_silva_2008.py#L438-L461 | train | 214,108 |
gem/oq-engine | openquake/hazardlib/gsim/abrahamson_silva_2008.py | AbrahamsonSilva2008._compute_a22_factor | def _compute_a22_factor(self, imt):
"""
Compute and return the a22 factor, equation 20, page 80.
"""
if imt.name == 'PGV':
return 0.0
period = imt.period
if period < 2.0:
return 0.0
else:
return 0.0625 * (period - 2.0) | python | def _compute_a22_factor(self, imt):
"""
Compute and return the a22 factor, equation 20, page 80.
"""
if imt.name == 'PGV':
return 0.0
period = imt.period
if period < 2.0:
return 0.0
else:
return 0.0625 * (period - 2.0) | [
"def",
"_compute_a22_factor",
"(",
"self",
",",
"imt",
")",
":",
"if",
"imt",
".",
"name",
"==",
"'PGV'",
":",
"return",
"0.0",
"period",
"=",
"imt",
".",
"period",
"if",
"period",
"<",
"2.0",
":",
"return",
"0.0",
"else",
":",
"return",
"0.0625",
"*... | Compute and return the a22 factor, equation 20, page 80. | [
"Compute",
"and",
"return",
"the",
"a22",
"factor",
"equation",
"20",
"page",
"80",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/abrahamson_silva_2008.py#L477-L487 | train | 214,109 |
gem/oq-engine | openquake/hazardlib/gsim/youngs_1997.py | YoungsEtAl1997SInter._compute_mean | def _compute_mean(self, C, A1, A2, A3, A4, A5, A6, mag, hypo_depth,
rrup, mean, idx):
"""
Compute mean for subduction interface events, as explained in table 2,
page 67.
"""
mean[idx] = (A1 + A2 * mag + C['C1'] + C['C2'] * (A3 - mag) ** 3 +
C['C3'] * np.log(rrup[idx] + A4 * np.exp(A5 * mag)) +
A6 * hypo_depth) | python | def _compute_mean(self, C, A1, A2, A3, A4, A5, A6, mag, hypo_depth,
rrup, mean, idx):
"""
Compute mean for subduction interface events, as explained in table 2,
page 67.
"""
mean[idx] = (A1 + A2 * mag + C['C1'] + C['C2'] * (A3 - mag) ** 3 +
C['C3'] * np.log(rrup[idx] + A4 * np.exp(A5 * mag)) +
A6 * hypo_depth) | [
"def",
"_compute_mean",
"(",
"self",
",",
"C",
",",
"A1",
",",
"A2",
",",
"A3",
",",
"A4",
",",
"A5",
",",
"A6",
",",
"mag",
",",
"hypo_depth",
",",
"rrup",
",",
"mean",
",",
"idx",
")",
":",
"mean",
"[",
"idx",
"]",
"=",
"(",
"A1",
"+",
"A... | Compute mean for subduction interface events, as explained in table 2,
page 67. | [
"Compute",
"mean",
"for",
"subduction",
"interface",
"events",
"as",
"explained",
"in",
"table",
"2",
"page",
"67",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/youngs_1997.py#L121-L129 | train | 214,110 |
gem/oq-engine | openquake/hazardlib/gsim/youngs_1997.py | YoungsEtAl1997SInter._compute_std | def _compute_std(self, C, mag, stddevs, idx):
"""
Compute total standard deviation, as explained in table 2, page 67.
"""
if mag > 8.0:
mag = 8.0
for stddev in stddevs:
stddev[idx] += C['C4'] + C['C5'] * mag | python | def _compute_std(self, C, mag, stddevs, idx):
"""
Compute total standard deviation, as explained in table 2, page 67.
"""
if mag > 8.0:
mag = 8.0
for stddev in stddevs:
stddev[idx] += C['C4'] + C['C5'] * mag | [
"def",
"_compute_std",
"(",
"self",
",",
"C",
",",
"mag",
",",
"stddevs",
",",
"idx",
")",
":",
"if",
"mag",
">",
"8.0",
":",
"mag",
"=",
"8.0",
"for",
"stddev",
"in",
"stddevs",
":",
"stddev",
"[",
"idx",
"]",
"+=",
"C",
"[",
"'C4'",
"]",
"+",... | Compute total standard deviation, as explained in table 2, page 67. | [
"Compute",
"total",
"standard",
"deviation",
"as",
"explained",
"in",
"table",
"2",
"page",
"67",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/youngs_1997.py#L131-L139 | train | 214,111 |
gem/oq-engine | openquake/hmtk/plotting/mapping.py | HMTKBaseMap._build_basemap | def _build_basemap(self):
'''
Creates the map according to the input configuration
'''
if self.config['min_lon'] >= self.config['max_lon']:
raise ValueError('Upper limit of long is smaller than lower limit')
if self.config['min_lon'] >= self.config['max_lon']:
raise ValueError('Upper limit of long is smaller than lower limit')
# Corners of the map
lowcrnrlat = self.config['min_lat']
lowcrnrlon = self.config['min_lon']
uppcrnrlat = self.config['max_lat']
uppcrnrlon = self.config['max_lon']
if 'resolution' not in self.config.keys():
self.config['resolution'] = 'l'
lat0 = lowcrnrlat + ((uppcrnrlat - lowcrnrlat) / 2)
lon0 = lowcrnrlon + ((uppcrnrlon - lowcrnrlon) / 2)
if (uppcrnrlat - lowcrnrlat) >= (uppcrnrlon - lowcrnrlon):
fig_aspect = PORTRAIT_ASPECT
else:
fig_aspect = LANDSCAPE_ASPECT
if self.ax is None:
self.fig, self.ax = plt.subplots(figsize=fig_aspect,
facecolor='w',
edgecolor='k')
else:
self.fig = self.ax.get_figure()
if self.title:
self.ax.set_title(self.title, fontsize=16)
parallels = np.arange(-90., 90., self.lat_lon_spacing)
meridians = np.arange(0., 360., self.lat_lon_spacing)
# Build Map
# Do not import Basemap at top level since it's an optional feature
# and it would break doctests
from mpl_toolkits.basemap import Basemap
self.m = Basemap(
llcrnrlon=lowcrnrlon, llcrnrlat=lowcrnrlat,
urcrnrlon=uppcrnrlon, urcrnrlat=uppcrnrlat,
projection='stere', resolution=self.config['resolution'],
area_thresh=1000.0, lat_0=lat0, lon_0=lon0, ax=self.ax)
self.m.drawcountries()
self.m.drawmapboundary()
self.m.drawcoastlines()
self.m.drawstates()
self.m.drawparallels(parallels, labels=[1, 0, 0, 0], fontsize=12)
self.m.drawmeridians(meridians, labels=[0, 0, 0, 1], fontsize=12)
self.m.fillcontinents(color='wheat') | python | def _build_basemap(self):
'''
Creates the map according to the input configuration
'''
if self.config['min_lon'] >= self.config['max_lon']:
raise ValueError('Upper limit of long is smaller than lower limit')
if self.config['min_lon'] >= self.config['max_lon']:
raise ValueError('Upper limit of long is smaller than lower limit')
# Corners of the map
lowcrnrlat = self.config['min_lat']
lowcrnrlon = self.config['min_lon']
uppcrnrlat = self.config['max_lat']
uppcrnrlon = self.config['max_lon']
if 'resolution' not in self.config.keys():
self.config['resolution'] = 'l'
lat0 = lowcrnrlat + ((uppcrnrlat - lowcrnrlat) / 2)
lon0 = lowcrnrlon + ((uppcrnrlon - lowcrnrlon) / 2)
if (uppcrnrlat - lowcrnrlat) >= (uppcrnrlon - lowcrnrlon):
fig_aspect = PORTRAIT_ASPECT
else:
fig_aspect = LANDSCAPE_ASPECT
if self.ax is None:
self.fig, self.ax = plt.subplots(figsize=fig_aspect,
facecolor='w',
edgecolor='k')
else:
self.fig = self.ax.get_figure()
if self.title:
self.ax.set_title(self.title, fontsize=16)
parallels = np.arange(-90., 90., self.lat_lon_spacing)
meridians = np.arange(0., 360., self.lat_lon_spacing)
# Build Map
# Do not import Basemap at top level since it's an optional feature
# and it would break doctests
from mpl_toolkits.basemap import Basemap
self.m = Basemap(
llcrnrlon=lowcrnrlon, llcrnrlat=lowcrnrlat,
urcrnrlon=uppcrnrlon, urcrnrlat=uppcrnrlat,
projection='stere', resolution=self.config['resolution'],
area_thresh=1000.0, lat_0=lat0, lon_0=lon0, ax=self.ax)
self.m.drawcountries()
self.m.drawmapboundary()
self.m.drawcoastlines()
self.m.drawstates()
self.m.drawparallels(parallels, labels=[1, 0, 0, 0], fontsize=12)
self.m.drawmeridians(meridians, labels=[0, 0, 0, 1], fontsize=12)
self.m.fillcontinents(color='wheat') | [
"def",
"_build_basemap",
"(",
"self",
")",
":",
"if",
"self",
".",
"config",
"[",
"'min_lon'",
"]",
">=",
"self",
".",
"config",
"[",
"'max_lon'",
"]",
":",
"raise",
"ValueError",
"(",
"'Upper limit of long is smaller than lower limit'",
")",
"if",
"self",
"."... | Creates the map according to the input configuration | [
"Creates",
"the",
"map",
"according",
"to",
"the",
"input",
"configuration"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/plotting/mapping.py#L124-L175 | train | 214,112 |
gem/oq-engine | openquake/hmtk/plotting/mapping.py | HMTKBaseMap.savemap | def savemap(self, filename, filetype='png', papertype="a4"):
"""
Save the figure
"""
self.fig.savefig(filename,
dpi=self.dpi,
format=filetype,
papertype=papertype) | python | def savemap(self, filename, filetype='png', papertype="a4"):
"""
Save the figure
"""
self.fig.savefig(filename,
dpi=self.dpi,
format=filetype,
papertype=papertype) | [
"def",
"savemap",
"(",
"self",
",",
"filename",
",",
"filetype",
"=",
"'png'",
",",
"papertype",
"=",
"\"a4\"",
")",
":",
"self",
".",
"fig",
".",
"savefig",
"(",
"filename",
",",
"dpi",
"=",
"self",
".",
"dpi",
",",
"format",
"=",
"filetype",
",",
... | Save the figure | [
"Save",
"the",
"figure"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/plotting/mapping.py#L177-L184 | train | 214,113 |
gem/oq-engine | openquake/hmtk/plotting/mapping.py | HMTKBaseMap.add_source_model | def add_source_model(
self, model, area_border='k-', border_width=1.0,
point_marker='ks', point_size=2.0, overlay=False, min_depth=0.,
max_depth=None, alpha=1.0):
"""
Adds a source model to the map
:param model:
Source model of mixed typologies as instance of :class:
openquake.hmtk.sources.source_model.mtkSourceModel
"""
for source in model.sources:
if isinstance(source, mtkAreaSource):
self._plot_area_source(source, area_border, border_width)
elif isinstance(source, mtkPointSource):
self._plot_point_source(source, point_marker, point_size)
elif isinstance(source, mtkComplexFaultSource):
self._plot_complex_fault(source, area_border, border_width,
min_depth, max_depth, alpha)
elif isinstance(source, mtkSimpleFaultSource):
self._plot_simple_fault(source, area_border, border_width)
else:
pass
if not overlay:
plt.show() | python | def add_source_model(
self, model, area_border='k-', border_width=1.0,
point_marker='ks', point_size=2.0, overlay=False, min_depth=0.,
max_depth=None, alpha=1.0):
"""
Adds a source model to the map
:param model:
Source model of mixed typologies as instance of :class:
openquake.hmtk.sources.source_model.mtkSourceModel
"""
for source in model.sources:
if isinstance(source, mtkAreaSource):
self._plot_area_source(source, area_border, border_width)
elif isinstance(source, mtkPointSource):
self._plot_point_source(source, point_marker, point_size)
elif isinstance(source, mtkComplexFaultSource):
self._plot_complex_fault(source, area_border, border_width,
min_depth, max_depth, alpha)
elif isinstance(source, mtkSimpleFaultSource):
self._plot_simple_fault(source, area_border, border_width)
else:
pass
if not overlay:
plt.show() | [
"def",
"add_source_model",
"(",
"self",
",",
"model",
",",
"area_border",
"=",
"'k-'",
",",
"border_width",
"=",
"1.0",
",",
"point_marker",
"=",
"'ks'",
",",
"point_size",
"=",
"2.0",
",",
"overlay",
"=",
"False",
",",
"min_depth",
"=",
"0.",
",",
"max_... | Adds a source model to the map
:param model:
Source model of mixed typologies as instance of :class:
openquake.hmtk.sources.source_model.mtkSourceModel | [
"Adds",
"a",
"source",
"model",
"to",
"the",
"map"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/plotting/mapping.py#L337-L361 | train | 214,114 |
gem/oq-engine | openquake/hmtk/plotting/mapping.py | HMTKBaseMap.add_colour_scaled_points | def add_colour_scaled_points(self, longitude, latitude, data, shape='s',
alpha=1.0, size=20, norm=None, overlay=False):
"""
Overlays a set of points on a map with a fixed size but colour scaled
according to the data
:param np.ndarray longitude:
Longitude
:param np.ndarray latitude:
Latitude
:param np.ndarray data:
Data for plotting
:param str shape:
Marker style
:param float alpha:
Sets the transparency of the marker (0 for transparent, 1 opaque)
:param int size:
Marker size
:param norm:
Normalisation as instance of :class: matplotlib.colors.Normalize
"""
if not norm:
norm = Normalize(vmin=np.min(data), vmax=np.max(data))
x, y, = self.m(longitude, latitude)
mappable = self.m.scatter(x, y,
marker=shape,
s=size,
c=data,
norm=norm,
alpha=alpha,
linewidths=0.0,
zorder=4)
self.m.colorbar(mappable=mappable, fig=self.fig, ax=self.ax)
if not overlay:
plt.show() | python | def add_colour_scaled_points(self, longitude, latitude, data, shape='s',
alpha=1.0, size=20, norm=None, overlay=False):
"""
Overlays a set of points on a map with a fixed size but colour scaled
according to the data
:param np.ndarray longitude:
Longitude
:param np.ndarray latitude:
Latitude
:param np.ndarray data:
Data for plotting
:param str shape:
Marker style
:param float alpha:
Sets the transparency of the marker (0 for transparent, 1 opaque)
:param int size:
Marker size
:param norm:
Normalisation as instance of :class: matplotlib.colors.Normalize
"""
if not norm:
norm = Normalize(vmin=np.min(data), vmax=np.max(data))
x, y, = self.m(longitude, latitude)
mappable = self.m.scatter(x, y,
marker=shape,
s=size,
c=data,
norm=norm,
alpha=alpha,
linewidths=0.0,
zorder=4)
self.m.colorbar(mappable=mappable, fig=self.fig, ax=self.ax)
if not overlay:
plt.show() | [
"def",
"add_colour_scaled_points",
"(",
"self",
",",
"longitude",
",",
"latitude",
",",
"data",
",",
"shape",
"=",
"'s'",
",",
"alpha",
"=",
"1.0",
",",
"size",
"=",
"20",
",",
"norm",
"=",
"None",
",",
"overlay",
"=",
"False",
")",
":",
"if",
"not",... | Overlays a set of points on a map with a fixed size but colour scaled
according to the data
:param np.ndarray longitude:
Longitude
:param np.ndarray latitude:
Latitude
:param np.ndarray data:
Data for plotting
:param str shape:
Marker style
:param float alpha:
Sets the transparency of the marker (0 for transparent, 1 opaque)
:param int size:
Marker size
:param norm:
Normalisation as instance of :class: matplotlib.colors.Normalize | [
"Overlays",
"a",
"set",
"of",
"points",
"on",
"a",
"map",
"with",
"a",
"fixed",
"size",
"but",
"colour",
"scaled",
"according",
"to",
"the",
"data"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/plotting/mapping.py#L363-L397 | train | 214,115 |
gem/oq-engine | openquake/hmtk/plotting/mapping.py | HMTKBaseMap.add_size_scaled_points | def add_size_scaled_points(
self, longitude, latitude, data, shape='o',
logplot=False, alpha=1.0, colour='b', smin=2.0, sscale=2.0,
overlay=False):
"""
Plots a set of points with size scaled according to the data
:param bool logplot:
Choose to scale according to the logarithm (base 10) of the data
:param float smin:
Minimum scale size
:param float sscale:
Scaling factor
"""
if logplot:
data = np.log10(data.copy())
x, y, = self.m(longitude, latitude)
self.m.scatter(x, y,
marker=shape,
s=(smin + data ** sscale),
c=colour,
alpha=alpha,
zorder=2)
if not overlay:
plt.show() | python | def add_size_scaled_points(
self, longitude, latitude, data, shape='o',
logplot=False, alpha=1.0, colour='b', smin=2.0, sscale=2.0,
overlay=False):
"""
Plots a set of points with size scaled according to the data
:param bool logplot:
Choose to scale according to the logarithm (base 10) of the data
:param float smin:
Minimum scale size
:param float sscale:
Scaling factor
"""
if logplot:
data = np.log10(data.copy())
x, y, = self.m(longitude, latitude)
self.m.scatter(x, y,
marker=shape,
s=(smin + data ** sscale),
c=colour,
alpha=alpha,
zorder=2)
if not overlay:
plt.show() | [
"def",
"add_size_scaled_points",
"(",
"self",
",",
"longitude",
",",
"latitude",
",",
"data",
",",
"shape",
"=",
"'o'",
",",
"logplot",
"=",
"False",
",",
"alpha",
"=",
"1.0",
",",
"colour",
"=",
"'b'",
",",
"smin",
"=",
"2.0",
",",
"sscale",
"=",
"2... | Plots a set of points with size scaled according to the data
:param bool logplot:
Choose to scale according to the logarithm (base 10) of the data
:param float smin:
Minimum scale size
:param float sscale:
Scaling factor | [
"Plots",
"a",
"set",
"of",
"points",
"with",
"size",
"scaled",
"according",
"to",
"the",
"data"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/plotting/mapping.py#L399-L424 | train | 214,116 |
gem/oq-engine | openquake/hmtk/plotting/mapping.py | HMTKBaseMap.add_catalogue_cluster | def add_catalogue_cluster(self, catalogue, vcl, flagvector,
cluster_id=None, overlay=True):
"""
Creates a plot of a catalogue showing where particular clusters exist
"""
# Create simple magnitude scaled point basemap
self.add_size_scaled_points(catalogue.data['longitude'],
catalogue.data['latitude'],
catalogue.data['magnitude'],
shape="o",
alpha=0.8,
colour=(0.5, 0.5, 0.5),
smin=1.0,
sscale=1.5,
overlay=True)
# If cluster ID is not specified just show mainshocks
if cluster_id is None:
idx = flagvector == 0
self.add_size_scaled_points(catalogue.data['longitude'][idx],
catalogue.data['latitude'][idx],
catalogue.data['magnitude'][idx],
shape="o",
colour="r",
smin=1.0,
sscale=1.5,
overlay=overlay)
return
if not isinstance(cluster_id, collections.Iterable):
cluster_id = [cluster_id]
for iloc, clid in enumerate(cluster_id):
if iloc == (len(cluster_id) - 1):
# On last iteration set overlay to function overlay
temp_overlay = overlay
else:
temp_overlay = True
idx = vcl == clid
self.add_size_scaled_points(
catalogue.data["longitude"][idx],
catalogue.data["latitude"][idx],
catalogue.data["magnitude"][idx],
shape="o",
colour=DISSIMILAR_COLOURLIST[(iloc + 1) % NCOLS],
smin=1.0,
sscale=1.5,
overlay=temp_overlay) | python | def add_catalogue_cluster(self, catalogue, vcl, flagvector,
cluster_id=None, overlay=True):
"""
Creates a plot of a catalogue showing where particular clusters exist
"""
# Create simple magnitude scaled point basemap
self.add_size_scaled_points(catalogue.data['longitude'],
catalogue.data['latitude'],
catalogue.data['magnitude'],
shape="o",
alpha=0.8,
colour=(0.5, 0.5, 0.5),
smin=1.0,
sscale=1.5,
overlay=True)
# If cluster ID is not specified just show mainshocks
if cluster_id is None:
idx = flagvector == 0
self.add_size_scaled_points(catalogue.data['longitude'][idx],
catalogue.data['latitude'][idx],
catalogue.data['magnitude'][idx],
shape="o",
colour="r",
smin=1.0,
sscale=1.5,
overlay=overlay)
return
if not isinstance(cluster_id, collections.Iterable):
cluster_id = [cluster_id]
for iloc, clid in enumerate(cluster_id):
if iloc == (len(cluster_id) - 1):
# On last iteration set overlay to function overlay
temp_overlay = overlay
else:
temp_overlay = True
idx = vcl == clid
self.add_size_scaled_points(
catalogue.data["longitude"][idx],
catalogue.data["latitude"][idx],
catalogue.data["magnitude"][idx],
shape="o",
colour=DISSIMILAR_COLOURLIST[(iloc + 1) % NCOLS],
smin=1.0,
sscale=1.5,
overlay=temp_overlay) | [
"def",
"add_catalogue_cluster",
"(",
"self",
",",
"catalogue",
",",
"vcl",
",",
"flagvector",
",",
"cluster_id",
"=",
"None",
",",
"overlay",
"=",
"True",
")",
":",
"# Create simple magnitude scaled point basemap",
"self",
".",
"add_size_scaled_points",
"(",
"catalo... | Creates a plot of a catalogue showing where particular clusters exist | [
"Creates",
"a",
"plot",
"of",
"a",
"catalogue",
"showing",
"where",
"particular",
"clusters",
"exist"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/plotting/mapping.py#L480-L524 | train | 214,117 |
gem/oq-engine | openquake/hazardlib/gsim/campbell_bozorgnia_2003.py | CampbellBozorgnia2003NSHMP2007._get_stddevs | def _get_stddevs(self, C, mag, stddev_types, num_sites):
"""
Return standard deviation as defined in eq.11 page 319.
"""
std = C['c16'] + np.zeros(num_sites)
if mag < 7.4:
std -= 0.07 * mag
else:
std -= 0.518
# only the 'total' standard deviation is supported, therefore the
# std is always the same for all types
stddevs = [std for _ in stddev_types]
return stddevs | python | def _get_stddevs(self, C, mag, stddev_types, num_sites):
"""
Return standard deviation as defined in eq.11 page 319.
"""
std = C['c16'] + np.zeros(num_sites)
if mag < 7.4:
std -= 0.07 * mag
else:
std -= 0.518
# only the 'total' standard deviation is supported, therefore the
# std is always the same for all types
stddevs = [std for _ in stddev_types]
return stddevs | [
"def",
"_get_stddevs",
"(",
"self",
",",
"C",
",",
"mag",
",",
"stddev_types",
",",
"num_sites",
")",
":",
"std",
"=",
"C",
"[",
"'c16'",
"]",
"+",
"np",
".",
"zeros",
"(",
"num_sites",
")",
"if",
"mag",
"<",
"7.4",
":",
"std",
"-=",
"0.07",
"*",... | Return standard deviation as defined in eq.11 page 319. | [
"Return",
"standard",
"deviation",
"as",
"defined",
"in",
"eq",
".",
"11",
"page",
"319",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/campbell_bozorgnia_2003.py#L112-L127 | train | 214,118 |
gem/oq-engine | openquake/baselib/general.py | distinct | def distinct(keys):
"""
Return the distinct keys in order.
"""
known = set()
outlist = []
for key in keys:
if key not in known:
outlist.append(key)
known.add(key)
return outlist | python | def distinct(keys):
"""
Return the distinct keys in order.
"""
known = set()
outlist = []
for key in keys:
if key not in known:
outlist.append(key)
known.add(key)
return outlist | [
"def",
"distinct",
"(",
"keys",
")",
":",
"known",
"=",
"set",
"(",
")",
"outlist",
"=",
"[",
"]",
"for",
"key",
"in",
"keys",
":",
"if",
"key",
"not",
"in",
"known",
":",
"outlist",
".",
"append",
"(",
"key",
")",
"known",
".",
"add",
"(",
"ke... | Return the distinct keys in order. | [
"Return",
"the",
"distinct",
"keys",
"in",
"order",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/general.py#L171-L181 | train | 214,119 |
gem/oq-engine | openquake/baselib/general.py | split_in_blocks | def split_in_blocks(sequence, hint, weight=lambda item: 1, key=nokey):
"""
Split the `sequence` in a number of WeightedSequences close to `hint`.
:param sequence: a finite sequence of items
:param hint: an integer suggesting the number of subsequences to generate
:param weight: a function returning the weigth of a given item
:param key: a function returning the key of a given item
The WeightedSequences are of homogeneous key and they try to be
balanced in weight. For instance
>>> items = 'ABCDE'
>>> list(split_in_blocks(items, 3))
[<WeightedSequence ['A', 'B'], weight=2>, <WeightedSequence ['C', 'D'], weight=2>, <WeightedSequence ['E'], weight=1>]
"""
if isinstance(sequence, int):
return split_in_slices(sequence, hint)
elif hint in (0, 1) and key is nokey: # do not split
return [sequence]
elif hint in (0, 1): # split by key
blocks = []
for k, group in groupby(sequence, key).items():
blocks.append(group)
return blocks
items = sorted(sequence, key=lambda item: (key(item), weight(item)))
assert hint > 0, hint
assert len(items) > 0, len(items)
total_weight = float(sum(weight(item) for item in items))
return block_splitter(items, math.ceil(total_weight / hint), weight, key) | python | def split_in_blocks(sequence, hint, weight=lambda item: 1, key=nokey):
"""
Split the `sequence` in a number of WeightedSequences close to `hint`.
:param sequence: a finite sequence of items
:param hint: an integer suggesting the number of subsequences to generate
:param weight: a function returning the weigth of a given item
:param key: a function returning the key of a given item
The WeightedSequences are of homogeneous key and they try to be
balanced in weight. For instance
>>> items = 'ABCDE'
>>> list(split_in_blocks(items, 3))
[<WeightedSequence ['A', 'B'], weight=2>, <WeightedSequence ['C', 'D'], weight=2>, <WeightedSequence ['E'], weight=1>]
"""
if isinstance(sequence, int):
return split_in_slices(sequence, hint)
elif hint in (0, 1) and key is nokey: # do not split
return [sequence]
elif hint in (0, 1): # split by key
blocks = []
for k, group in groupby(sequence, key).items():
blocks.append(group)
return blocks
items = sorted(sequence, key=lambda item: (key(item), weight(item)))
assert hint > 0, hint
assert len(items) > 0, len(items)
total_weight = float(sum(weight(item) for item in items))
return block_splitter(items, math.ceil(total_weight / hint), weight, key) | [
"def",
"split_in_blocks",
"(",
"sequence",
",",
"hint",
",",
"weight",
"=",
"lambda",
"item",
":",
"1",
",",
"key",
"=",
"nokey",
")",
":",
"if",
"isinstance",
"(",
"sequence",
",",
"int",
")",
":",
"return",
"split_in_slices",
"(",
"sequence",
",",
"h... | Split the `sequence` in a number of WeightedSequences close to `hint`.
:param sequence: a finite sequence of items
:param hint: an integer suggesting the number of subsequences to generate
:param weight: a function returning the weigth of a given item
:param key: a function returning the key of a given item
The WeightedSequences are of homogeneous key and they try to be
balanced in weight. For instance
>>> items = 'ABCDE'
>>> list(split_in_blocks(items, 3))
[<WeightedSequence ['A', 'B'], weight=2>, <WeightedSequence ['C', 'D'], weight=2>, <WeightedSequence ['E'], weight=1>] | [
"Split",
"the",
"sequence",
"in",
"a",
"number",
"of",
"WeightedSequences",
"close",
"to",
"hint",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/general.py#L274-L304 | train | 214,120 |
gem/oq-engine | openquake/baselib/general.py | gettemp | def gettemp(content=None, dir=None, prefix="tmp", suffix="tmp"):
"""Create temporary file with the given content.
Please note: the temporary file must be deleted by the caller.
:param string content: the content to write to the temporary file.
:param string dir: directory where the file should be created
:param string prefix: file name prefix
:param string suffix: file name suffix
:returns: a string with the path to the temporary file
"""
if dir is not None:
if not os.path.exists(dir):
os.makedirs(dir)
fh, path = tempfile.mkstemp(dir=dir, prefix=prefix, suffix=suffix)
_tmp_paths.append(path)
if content:
fh = os.fdopen(fh, "wb")
if hasattr(content, 'encode'):
content = content.encode('utf8')
fh.write(content)
fh.close()
return path | python | def gettemp(content=None, dir=None, prefix="tmp", suffix="tmp"):
"""Create temporary file with the given content.
Please note: the temporary file must be deleted by the caller.
:param string content: the content to write to the temporary file.
:param string dir: directory where the file should be created
:param string prefix: file name prefix
:param string suffix: file name suffix
:returns: a string with the path to the temporary file
"""
if dir is not None:
if not os.path.exists(dir):
os.makedirs(dir)
fh, path = tempfile.mkstemp(dir=dir, prefix=prefix, suffix=suffix)
_tmp_paths.append(path)
if content:
fh = os.fdopen(fh, "wb")
if hasattr(content, 'encode'):
content = content.encode('utf8')
fh.write(content)
fh.close()
return path | [
"def",
"gettemp",
"(",
"content",
"=",
"None",
",",
"dir",
"=",
"None",
",",
"prefix",
"=",
"\"tmp\"",
",",
"suffix",
"=",
"\"tmp\"",
")",
":",
"if",
"dir",
"is",
"not",
"None",
":",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"dir",
")",... | Create temporary file with the given content.
Please note: the temporary file must be deleted by the caller.
:param string content: the content to write to the temporary file.
:param string dir: directory where the file should be created
:param string prefix: file name prefix
:param string suffix: file name suffix
:returns: a string with the path to the temporary file | [
"Create",
"temporary",
"file",
"with",
"the",
"given",
"content",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/general.py#L356-L378 | train | 214,121 |
gem/oq-engine | openquake/baselib/general.py | removetmp | def removetmp():
"""
Remove the temporary files created by gettemp
"""
for path in _tmp_paths:
if os.path.exists(path): # not removed yet
try:
os.remove(path)
except PermissionError:
pass | python | def removetmp():
"""
Remove the temporary files created by gettemp
"""
for path in _tmp_paths:
if os.path.exists(path): # not removed yet
try:
os.remove(path)
except PermissionError:
pass | [
"def",
"removetmp",
"(",
")",
":",
"for",
"path",
"in",
"_tmp_paths",
":",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"path",
")",
":",
"# not removed yet",
"try",
":",
"os",
".",
"remove",
"(",
"path",
")",
"except",
"PermissionError",
":",
"pass"
] | Remove the temporary files created by gettemp | [
"Remove",
"the",
"temporary",
"files",
"created",
"by",
"gettemp"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/general.py#L382-L391 | train | 214,122 |
gem/oq-engine | openquake/baselib/general.py | run_in_process | def run_in_process(code, *args):
"""
Run in an external process the given Python code and return the
output as a Python object. If there are arguments, then code is
taken as a template and traditional string interpolation is performed.
:param code: string or template describing Python code
:param args: arguments to be used for interpolation
:returns: the output of the process, as a Python object
"""
if args:
code %= args
try:
out = subprocess.check_output([sys.executable, '-c', code])
except subprocess.CalledProcessError as exc:
print(exc.cmd[-1], file=sys.stderr)
raise
if out:
return eval(out, {}, {}) | python | def run_in_process(code, *args):
"""
Run in an external process the given Python code and return the
output as a Python object. If there are arguments, then code is
taken as a template and traditional string interpolation is performed.
:param code: string or template describing Python code
:param args: arguments to be used for interpolation
:returns: the output of the process, as a Python object
"""
if args:
code %= args
try:
out = subprocess.check_output([sys.executable, '-c', code])
except subprocess.CalledProcessError as exc:
print(exc.cmd[-1], file=sys.stderr)
raise
if out:
return eval(out, {}, {}) | [
"def",
"run_in_process",
"(",
"code",
",",
"*",
"args",
")",
":",
"if",
"args",
":",
"code",
"%=",
"args",
"try",
":",
"out",
"=",
"subprocess",
".",
"check_output",
"(",
"[",
"sys",
".",
"executable",
",",
"'-c'",
",",
"code",
"]",
")",
"except",
... | Run in an external process the given Python code and return the
output as a Python object. If there are arguments, then code is
taken as a template and traditional string interpolation is performed.
:param code: string or template describing Python code
:param args: arguments to be used for interpolation
:returns: the output of the process, as a Python object | [
"Run",
"in",
"an",
"external",
"process",
"the",
"given",
"Python",
"code",
"and",
"return",
"the",
"output",
"as",
"a",
"Python",
"object",
".",
"If",
"there",
"are",
"arguments",
"then",
"code",
"is",
"taken",
"as",
"a",
"template",
"and",
"traditional",... | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/general.py#L422-L440 | train | 214,123 |
gem/oq-engine | openquake/baselib/general.py | import_all | def import_all(module_or_package):
"""
If `module_or_package` is a module, just import it; if it is a package,
recursively imports all the modules it contains. Returns the names of
the modules that were imported as a set. The set can be empty if
the modules were already in sys.modules.
"""
already_imported = set(sys.modules)
mod_or_pkg = importlib.import_module(module_or_package)
if not hasattr(mod_or_pkg, '__path__'): # is a simple module
return set(sys.modules) - already_imported
# else import all modules contained in the package
[pkg_path] = mod_or_pkg.__path__
n = len(pkg_path)
for cwd, dirs, files in os.walk(pkg_path):
if all(os.path.basename(f) != '__init__.py' for f in files):
# the current working directory is not a subpackage
continue
for f in files:
if f.endswith('.py'):
# convert PKGPATH/subpackage/module.py -> subpackage.module
# works at any level of nesting
modname = (module_or_package + cwd[n:].replace(os.sep, '.') +
'.' + os.path.basename(f[:-3]))
importlib.import_module(modname)
return set(sys.modules) - already_imported | python | def import_all(module_or_package):
"""
If `module_or_package` is a module, just import it; if it is a package,
recursively imports all the modules it contains. Returns the names of
the modules that were imported as a set. The set can be empty if
the modules were already in sys.modules.
"""
already_imported = set(sys.modules)
mod_or_pkg = importlib.import_module(module_or_package)
if not hasattr(mod_or_pkg, '__path__'): # is a simple module
return set(sys.modules) - already_imported
# else import all modules contained in the package
[pkg_path] = mod_or_pkg.__path__
n = len(pkg_path)
for cwd, dirs, files in os.walk(pkg_path):
if all(os.path.basename(f) != '__init__.py' for f in files):
# the current working directory is not a subpackage
continue
for f in files:
if f.endswith('.py'):
# convert PKGPATH/subpackage/module.py -> subpackage.module
# works at any level of nesting
modname = (module_or_package + cwd[n:].replace(os.sep, '.') +
'.' + os.path.basename(f[:-3]))
importlib.import_module(modname)
return set(sys.modules) - already_imported | [
"def",
"import_all",
"(",
"module_or_package",
")",
":",
"already_imported",
"=",
"set",
"(",
"sys",
".",
"modules",
")",
"mod_or_pkg",
"=",
"importlib",
".",
"import_module",
"(",
"module_or_package",
")",
"if",
"not",
"hasattr",
"(",
"mod_or_pkg",
",",
"'__p... | If `module_or_package` is a module, just import it; if it is a package,
recursively imports all the modules it contains. Returns the names of
the modules that were imported as a set. The set can be empty if
the modules were already in sys.modules. | [
"If",
"module_or_package",
"is",
"a",
"module",
"just",
"import",
"it",
";",
"if",
"it",
"is",
"a",
"package",
"recursively",
"imports",
"all",
"the",
"modules",
"it",
"contains",
".",
"Returns",
"the",
"names",
"of",
"the",
"modules",
"that",
"were",
"imp... | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/general.py#L447-L472 | train | 214,124 |
gem/oq-engine | openquake/baselib/general.py | get_array | def get_array(array, **kw):
"""
Extract a subarray by filtering on the given keyword arguments
"""
for name, value in kw.items():
array = array[array[name] == value]
return array | python | def get_array(array, **kw):
"""
Extract a subarray by filtering on the given keyword arguments
"""
for name, value in kw.items():
array = array[array[name] == value]
return array | [
"def",
"get_array",
"(",
"array",
",",
"*",
"*",
"kw",
")",
":",
"for",
"name",
",",
"value",
"in",
"kw",
".",
"items",
"(",
")",
":",
"array",
"=",
"array",
"[",
"array",
"[",
"name",
"]",
"==",
"value",
"]",
"return",
"array"
] | Extract a subarray by filtering on the given keyword arguments | [
"Extract",
"a",
"subarray",
"by",
"filtering",
"on",
"the",
"given",
"keyword",
"arguments"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/general.py#L890-L896 | train | 214,125 |
gem/oq-engine | openquake/baselib/general.py | not_equal | def not_equal(array_or_none1, array_or_none2):
"""
Compare two arrays that can also be None or have diffent shapes
and returns a boolean.
>>> a1 = numpy.array([1])
>>> a2 = numpy.array([2])
>>> a3 = numpy.array([2, 3])
>>> not_equal(a1, a2)
True
>>> not_equal(a1, a3)
True
>>> not_equal(a1, None)
True
"""
if array_or_none1 is None and array_or_none2 is None:
return False
elif array_or_none1 is None and array_or_none2 is not None:
return True
elif array_or_none1 is not None and array_or_none2 is None:
return True
if array_or_none1.shape != array_or_none2.shape:
return True
return (array_or_none1 != array_or_none2).any() | python | def not_equal(array_or_none1, array_or_none2):
"""
Compare two arrays that can also be None or have diffent shapes
and returns a boolean.
>>> a1 = numpy.array([1])
>>> a2 = numpy.array([2])
>>> a3 = numpy.array([2, 3])
>>> not_equal(a1, a2)
True
>>> not_equal(a1, a3)
True
>>> not_equal(a1, None)
True
"""
if array_or_none1 is None and array_or_none2 is None:
return False
elif array_or_none1 is None and array_or_none2 is not None:
return True
elif array_or_none1 is not None and array_or_none2 is None:
return True
if array_or_none1.shape != array_or_none2.shape:
return True
return (array_or_none1 != array_or_none2).any() | [
"def",
"not_equal",
"(",
"array_or_none1",
",",
"array_or_none2",
")",
":",
"if",
"array_or_none1",
"is",
"None",
"and",
"array_or_none2",
"is",
"None",
":",
"return",
"False",
"elif",
"array_or_none1",
"is",
"None",
"and",
"array_or_none2",
"is",
"not",
"None",... | Compare two arrays that can also be None or have diffent shapes
and returns a boolean.
>>> a1 = numpy.array([1])
>>> a2 = numpy.array([2])
>>> a3 = numpy.array([2, 3])
>>> not_equal(a1, a2)
True
>>> not_equal(a1, a3)
True
>>> not_equal(a1, None)
True | [
"Compare",
"two",
"arrays",
"that",
"can",
"also",
"be",
"None",
"or",
"have",
"diffent",
"shapes",
"and",
"returns",
"a",
"boolean",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/general.py#L899-L922 | train | 214,126 |
gem/oq-engine | openquake/baselib/general.py | humansize | def humansize(nbytes, suffixes=('B', 'KB', 'MB', 'GB', 'TB', 'PB')):
"""
Return file size in a human-friendly format
"""
if nbytes == 0:
return '0 B'
i = 0
while nbytes >= 1024 and i < len(suffixes) - 1:
nbytes /= 1024.
i += 1
f = ('%.2f' % nbytes).rstrip('0').rstrip('.')
return '%s %s' % (f, suffixes[i]) | python | def humansize(nbytes, suffixes=('B', 'KB', 'MB', 'GB', 'TB', 'PB')):
"""
Return file size in a human-friendly format
"""
if nbytes == 0:
return '0 B'
i = 0
while nbytes >= 1024 and i < len(suffixes) - 1:
nbytes /= 1024.
i += 1
f = ('%.2f' % nbytes).rstrip('0').rstrip('.')
return '%s %s' % (f, suffixes[i]) | [
"def",
"humansize",
"(",
"nbytes",
",",
"suffixes",
"=",
"(",
"'B'",
",",
"'KB'",
",",
"'MB'",
",",
"'GB'",
",",
"'TB'",
",",
"'PB'",
")",
")",
":",
"if",
"nbytes",
"==",
"0",
":",
"return",
"'0 B'",
"i",
"=",
"0",
"while",
"nbytes",
">=",
"1024"... | Return file size in a human-friendly format | [
"Return",
"file",
"size",
"in",
"a",
"human",
"-",
"friendly",
"format"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/general.py#L925-L936 | train | 214,127 |
gem/oq-engine | openquake/baselib/general.py | deprecated | def deprecated(func, msg='', *args, **kw):
"""
A family of decorators to mark deprecated functions.
:param msg:
the message to print the first time the
deprecated function is used.
Here is an example of usage:
>>> @deprecated(msg='Use new_function instead')
... def old_function():
... 'Do something'
Notice that if the function is called several time, the deprecation
warning will be displayed only the first time.
"""
msg = '%s.%s has been deprecated. %s' % (
func.__module__, func.__name__, msg)
if not hasattr(func, 'called'):
warnings.warn(msg, DeprecationWarning, stacklevel=2)
func.called = 0
func.called += 1
return func(*args, **kw) | python | def deprecated(func, msg='', *args, **kw):
"""
A family of decorators to mark deprecated functions.
:param msg:
the message to print the first time the
deprecated function is used.
Here is an example of usage:
>>> @deprecated(msg='Use new_function instead')
... def old_function():
... 'Do something'
Notice that if the function is called several time, the deprecation
warning will be displayed only the first time.
"""
msg = '%s.%s has been deprecated. %s' % (
func.__module__, func.__name__, msg)
if not hasattr(func, 'called'):
warnings.warn(msg, DeprecationWarning, stacklevel=2)
func.called = 0
func.called += 1
return func(*args, **kw) | [
"def",
"deprecated",
"(",
"func",
",",
"msg",
"=",
"''",
",",
"*",
"args",
",",
"*",
"*",
"kw",
")",
":",
"msg",
"=",
"'%s.%s has been deprecated. %s'",
"%",
"(",
"func",
".",
"__module__",
",",
"func",
".",
"__name__",
",",
"msg",
")",
"if",
"not",
... | A family of decorators to mark deprecated functions.
:param msg:
the message to print the first time the
deprecated function is used.
Here is an example of usage:
>>> @deprecated(msg='Use new_function instead')
... def old_function():
... 'Do something'
Notice that if the function is called several time, the deprecation
warning will be displayed only the first time. | [
"A",
"family",
"of",
"decorators",
"to",
"mark",
"deprecated",
"functions",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/general.py#L947-L970 | train | 214,128 |
gem/oq-engine | openquake/baselib/general.py | random_histogram | def random_histogram(counts, nbins, seed):
"""
Distribute a total number of counts on a set of bins homogenously.
>>> random_histogram(1, 2, 42)
array([1, 0])
>>> random_histogram(100, 5, 42)
array([28, 18, 17, 19, 18])
>>> random_histogram(10000, 5, 42)
array([2043, 2015, 2050, 1930, 1962])
"""
numpy.random.seed(seed)
return numpy.histogram(numpy.random.random(counts), nbins, (0, 1))[0] | python | def random_histogram(counts, nbins, seed):
"""
Distribute a total number of counts on a set of bins homogenously.
>>> random_histogram(1, 2, 42)
array([1, 0])
>>> random_histogram(100, 5, 42)
array([28, 18, 17, 19, 18])
>>> random_histogram(10000, 5, 42)
array([2043, 2015, 2050, 1930, 1962])
"""
numpy.random.seed(seed)
return numpy.histogram(numpy.random.random(counts), nbins, (0, 1))[0] | [
"def",
"random_histogram",
"(",
"counts",
",",
"nbins",
",",
"seed",
")",
":",
"numpy",
".",
"random",
".",
"seed",
"(",
"seed",
")",
"return",
"numpy",
".",
"histogram",
"(",
"numpy",
".",
"random",
".",
"random",
"(",
"counts",
")",
",",
"nbins",
"... | Distribute a total number of counts on a set of bins homogenously.
>>> random_histogram(1, 2, 42)
array([1, 0])
>>> random_histogram(100, 5, 42)
array([28, 18, 17, 19, 18])
>>> random_histogram(10000, 5, 42)
array([2043, 2015, 2050, 1930, 1962]) | [
"Distribute",
"a",
"total",
"number",
"of",
"counts",
"on",
"a",
"set",
"of",
"bins",
"homogenously",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/general.py#L988-L1000 | train | 214,129 |
gem/oq-engine | openquake/baselib/general.py | safeprint | def safeprint(*args, **kwargs):
"""
Convert and print characters using the proper encoding
"""
new_args = []
# when stdout is redirected to a file, python 2 uses ascii for the writer;
# python 3 uses what is configured in the system (i.e. 'utf-8')
# if sys.stdout is replaced by a StringIO instance, Python 2 does not
# have an attribute 'encoding', and we assume ascii in that case
str_encoding = getattr(sys.stdout, 'encoding', None) or 'ascii'
for s in args:
new_args.append(s.encode('utf-8').decode(str_encoding, 'ignore'))
return print(*new_args, **kwargs) | python | def safeprint(*args, **kwargs):
"""
Convert and print characters using the proper encoding
"""
new_args = []
# when stdout is redirected to a file, python 2 uses ascii for the writer;
# python 3 uses what is configured in the system (i.e. 'utf-8')
# if sys.stdout is replaced by a StringIO instance, Python 2 does not
# have an attribute 'encoding', and we assume ascii in that case
str_encoding = getattr(sys.stdout, 'encoding', None) or 'ascii'
for s in args:
new_args.append(s.encode('utf-8').decode(str_encoding, 'ignore'))
return print(*new_args, **kwargs) | [
"def",
"safeprint",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"new_args",
"=",
"[",
"]",
"# when stdout is redirected to a file, python 2 uses ascii for the writer;",
"# python 3 uses what is configured in the system (i.e. 'utf-8')",
"# if sys.stdout is replaced by a Str... | Convert and print characters using the proper encoding | [
"Convert",
"and",
"print",
"characters",
"using",
"the",
"proper",
"encoding"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/general.py#L1020-L1033 | train | 214,130 |
gem/oq-engine | openquake/baselib/general.py | zipfiles | def zipfiles(fnames, archive, mode='w', log=lambda msg: None, cleanup=False):
"""
Build a zip archive from the given file names.
:param fnames: list of path names
:param archive: path of the archive
"""
prefix = len(os.path.commonprefix([os.path.dirname(f) for f in fnames]))
with zipfile.ZipFile(
archive, mode, zipfile.ZIP_DEFLATED, allowZip64=True) as z:
for f in fnames:
log('Archiving %s' % f)
z.write(f, f[prefix:])
if cleanup: # remove the zipped file
os.remove(f)
log('Generated %s' % archive)
return archive | python | def zipfiles(fnames, archive, mode='w', log=lambda msg: None, cleanup=False):
"""
Build a zip archive from the given file names.
:param fnames: list of path names
:param archive: path of the archive
"""
prefix = len(os.path.commonprefix([os.path.dirname(f) for f in fnames]))
with zipfile.ZipFile(
archive, mode, zipfile.ZIP_DEFLATED, allowZip64=True) as z:
for f in fnames:
log('Archiving %s' % f)
z.write(f, f[prefix:])
if cleanup: # remove the zipped file
os.remove(f)
log('Generated %s' % archive)
return archive | [
"def",
"zipfiles",
"(",
"fnames",
",",
"archive",
",",
"mode",
"=",
"'w'",
",",
"log",
"=",
"lambda",
"msg",
":",
"None",
",",
"cleanup",
"=",
"False",
")",
":",
"prefix",
"=",
"len",
"(",
"os",
".",
"path",
".",
"commonprefix",
"(",
"[",
"os",
"... | Build a zip archive from the given file names.
:param fnames: list of path names
:param archive: path of the archive | [
"Build",
"a",
"zip",
"archive",
"from",
"the",
"given",
"file",
"names",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/general.py#L1073-L1089 | train | 214,131 |
gem/oq-engine | openquake/baselib/general.py | println | def println(msg):
"""
Convenience function to print messages on a single line in the terminal
"""
sys.stdout.write(msg)
sys.stdout.flush()
sys.stdout.write('\x08' * len(msg))
sys.stdout.flush() | python | def println(msg):
"""
Convenience function to print messages on a single line in the terminal
"""
sys.stdout.write(msg)
sys.stdout.flush()
sys.stdout.write('\x08' * len(msg))
sys.stdout.flush() | [
"def",
"println",
"(",
"msg",
")",
":",
"sys",
".",
"stdout",
".",
"write",
"(",
"msg",
")",
"sys",
".",
"stdout",
".",
"flush",
"(",
")",
"sys",
".",
"stdout",
".",
"write",
"(",
"'\\x08'",
"*",
"len",
"(",
"msg",
")",
")",
"sys",
".",
"stdout... | Convenience function to print messages on a single line in the terminal | [
"Convenience",
"function",
"to",
"print",
"messages",
"on",
"a",
"single",
"line",
"in",
"the",
"terminal"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/general.py#L1108-L1115 | train | 214,132 |
gem/oq-engine | openquake/baselib/general.py | warn | def warn(msg, *args):
"""
Print a warning on stderr
"""
if not args:
sys.stderr.write('WARNING: ' + msg)
else:
sys.stderr.write('WARNING: ' + msg % args) | python | def warn(msg, *args):
"""
Print a warning on stderr
"""
if not args:
sys.stderr.write('WARNING: ' + msg)
else:
sys.stderr.write('WARNING: ' + msg % args) | [
"def",
"warn",
"(",
"msg",
",",
"*",
"args",
")",
":",
"if",
"not",
"args",
":",
"sys",
".",
"stderr",
".",
"write",
"(",
"'WARNING: '",
"+",
"msg",
")",
"else",
":",
"sys",
".",
"stderr",
".",
"write",
"(",
"'WARNING: '",
"+",
"msg",
"%",
"args"... | Print a warning on stderr | [
"Print",
"a",
"warning",
"on",
"stderr"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/general.py#L1131-L1138 | train | 214,133 |
gem/oq-engine | openquake/baselib/general.py | WeightedSequence.insert | def insert(self, i, item_weight):
"""
Insert an item with the given weight in the sequence
"""
item, weight = item_weight
self._seq.insert(i, item)
self.weight += weight | python | def insert(self, i, item_weight):
"""
Insert an item with the given weight in the sequence
"""
item, weight = item_weight
self._seq.insert(i, item)
self.weight += weight | [
"def",
"insert",
"(",
"self",
",",
"i",
",",
"item_weight",
")",
":",
"item",
",",
"weight",
"=",
"item_weight",
"self",
".",
"_seq",
".",
"insert",
"(",
"i",
",",
"item",
")",
"self",
".",
"weight",
"+=",
"weight"
] | Insert an item with the given weight in the sequence | [
"Insert",
"an",
"item",
"with",
"the",
"given",
"weight",
"in",
"the",
"sequence"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/general.py#L143-L149 | train | 214,134 |
gem/oq-engine | openquake/baselib/general.py | CallableDict.add | def add(self, *keys):
"""
Return a decorator registering a new implementation for the
CallableDict for the given keys.
"""
def decorator(func):
for key in keys:
self[key] = func
return func
return decorator | python | def add(self, *keys):
"""
Return a decorator registering a new implementation for the
CallableDict for the given keys.
"""
def decorator(func):
for key in keys:
self[key] = func
return func
return decorator | [
"def",
"add",
"(",
"self",
",",
"*",
"keys",
")",
":",
"def",
"decorator",
"(",
"func",
")",
":",
"for",
"key",
"in",
"keys",
":",
"self",
"[",
"key",
"]",
"=",
"func",
"return",
"func",
"return",
"decorator"
] | Return a decorator registering a new implementation for the
CallableDict for the given keys. | [
"Return",
"a",
"decorator",
"registering",
"a",
"new",
"implementation",
"for",
"the",
"CallableDict",
"for",
"the",
"given",
"keys",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/general.py#L542-L551 | train | 214,135 |
gem/oq-engine | openquake/hazardlib/gsim/bindi_2014.py | BindiEtAl2014Rjb._get_magnitude_scaling_term | def _get_magnitude_scaling_term(self, C, mag):
"""
Returns the magnitude scaling term of the GMPE described in
equation 3
"""
dmag = mag - self.CONSTS["Mh"]
if mag < self.CONSTS["Mh"]:
return C["e1"] + (C["b1"] * dmag) + (C["b2"] * (dmag ** 2.0))
else:
return C["e1"] + (C["b3"] * dmag) | python | def _get_magnitude_scaling_term(self, C, mag):
"""
Returns the magnitude scaling term of the GMPE described in
equation 3
"""
dmag = mag - self.CONSTS["Mh"]
if mag < self.CONSTS["Mh"]:
return C["e1"] + (C["b1"] * dmag) + (C["b2"] * (dmag ** 2.0))
else:
return C["e1"] + (C["b3"] * dmag) | [
"def",
"_get_magnitude_scaling_term",
"(",
"self",
",",
"C",
",",
"mag",
")",
":",
"dmag",
"=",
"mag",
"-",
"self",
".",
"CONSTS",
"[",
"\"Mh\"",
"]",
"if",
"mag",
"<",
"self",
".",
"CONSTS",
"[",
"\"Mh\"",
"]",
":",
"return",
"C",
"[",
"\"e1\"",
"... | Returns the magnitude scaling term of the GMPE described in
equation 3 | [
"Returns",
"the",
"magnitude",
"scaling",
"term",
"of",
"the",
"GMPE",
"described",
"in",
"equation",
"3"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/bindi_2014.py#L125-L134 | train | 214,136 |
gem/oq-engine | openquake/hazardlib/gsim/bindi_2014.py | BindiEtAl2014Rjb._get_distance_scaling_term | def _get_distance_scaling_term(self, C, rval, mag):
"""
Returns the distance scaling term of the GMPE described in equation 2
"""
r_adj = np.sqrt(rval ** 2.0 + C["h"] ** 2.0)
return (
(C["c1"] + C["c2"] * (mag - self.CONSTS["Mref"])) *
np.log10(r_adj / self.CONSTS["Rref"]) -
(C["c3"] * (r_adj - self.CONSTS["Rref"]))) | python | def _get_distance_scaling_term(self, C, rval, mag):
"""
Returns the distance scaling term of the GMPE described in equation 2
"""
r_adj = np.sqrt(rval ** 2.0 + C["h"] ** 2.0)
return (
(C["c1"] + C["c2"] * (mag - self.CONSTS["Mref"])) *
np.log10(r_adj / self.CONSTS["Rref"]) -
(C["c3"] * (r_adj - self.CONSTS["Rref"]))) | [
"def",
"_get_distance_scaling_term",
"(",
"self",
",",
"C",
",",
"rval",
",",
"mag",
")",
":",
"r_adj",
"=",
"np",
".",
"sqrt",
"(",
"rval",
"**",
"2.0",
"+",
"C",
"[",
"\"h\"",
"]",
"**",
"2.0",
")",
"return",
"(",
"(",
"C",
"[",
"\"c1\"",
"]",
... | Returns the distance scaling term of the GMPE described in equation 2 | [
"Returns",
"the",
"distance",
"scaling",
"term",
"of",
"the",
"GMPE",
"described",
"in",
"equation",
"2"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/bindi_2014.py#L136-L144 | train | 214,137 |
gem/oq-engine | openquake/hazardlib/gsim/bindi_2014.py | BindiEtAl2014Rjb._get_site_amplification_term | def _get_site_amplification_term(self, C, vs30):
"""
Returns the site amplification term for the case in which Vs30
is used directly
"""
return C["gamma"] * np.log10(vs30 / self.CONSTS["Vref"]) | python | def _get_site_amplification_term(self, C, vs30):
"""
Returns the site amplification term for the case in which Vs30
is used directly
"""
return C["gamma"] * np.log10(vs30 / self.CONSTS["Vref"]) | [
"def",
"_get_site_amplification_term",
"(",
"self",
",",
"C",
",",
"vs30",
")",
":",
"return",
"C",
"[",
"\"gamma\"",
"]",
"*",
"np",
".",
"log10",
"(",
"vs30",
"/",
"self",
".",
"CONSTS",
"[",
"\"Vref\"",
"]",
")"
] | Returns the site amplification term for the case in which Vs30
is used directly | [
"Returns",
"the",
"site",
"amplification",
"term",
"for",
"the",
"case",
"in",
"which",
"Vs30",
"is",
"used",
"directly"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/bindi_2014.py#L169-L174 | train | 214,138 |
gem/oq-engine | openquake/hazardlib/gsim/bindi_2014.py | BindiEtAl2014RjbEC8._get_site_amplification_term | def _get_site_amplification_term(self, C, vs30):
"""
Returns the site amplification given Eurocode 8 site classification
"""
f_s = np.zeros_like(vs30)
# Site class B
idx = np.logical_and(vs30 < 800.0, vs30 >= 360.0)
f_s[idx] = C["eB"]
# Site Class C
idx = np.logical_and(vs30 < 360.0, vs30 >= 180.0)
f_s[idx] = C["eC"]
# Site Class D
idx = vs30 < 180.0
f_s[idx] = C["eD"]
return f_s | python | def _get_site_amplification_term(self, C, vs30):
"""
Returns the site amplification given Eurocode 8 site classification
"""
f_s = np.zeros_like(vs30)
# Site class B
idx = np.logical_and(vs30 < 800.0, vs30 >= 360.0)
f_s[idx] = C["eB"]
# Site Class C
idx = np.logical_and(vs30 < 360.0, vs30 >= 180.0)
f_s[idx] = C["eC"]
# Site Class D
idx = vs30 < 180.0
f_s[idx] = C["eD"]
return f_s | [
"def",
"_get_site_amplification_term",
"(",
"self",
",",
"C",
",",
"vs30",
")",
":",
"f_s",
"=",
"np",
".",
"zeros_like",
"(",
"vs30",
")",
"# Site class B",
"idx",
"=",
"np",
".",
"logical_and",
"(",
"vs30",
"<",
"800.0",
",",
"vs30",
">=",
"360.0",
"... | Returns the site amplification given Eurocode 8 site classification | [
"Returns",
"the",
"site",
"amplification",
"given",
"Eurocode",
"8",
"site",
"classification"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/bindi_2014.py#L234-L248 | train | 214,139 |
gem/oq-engine | openquake/hazardlib/gsim/bindi_2014.py | BindiEtAl2014RjbEC8NoSOF._get_mean | def _get_mean(self, C, rup, dists, sites):
"""
Returns the mean value of ground motion - noting that in this case
the style-of-faulting term is neglected
"""
return (self._get_magnitude_scaling_term(C, rup.mag) +
self._get_distance_scaling_term(C, dists.rjb, rup.mag) +
self._get_site_amplification_term(C, sites.vs30)) | python | def _get_mean(self, C, rup, dists, sites):
"""
Returns the mean value of ground motion - noting that in this case
the style-of-faulting term is neglected
"""
return (self._get_magnitude_scaling_term(C, rup.mag) +
self._get_distance_scaling_term(C, dists.rjb, rup.mag) +
self._get_site_amplification_term(C, sites.vs30)) | [
"def",
"_get_mean",
"(",
"self",
",",
"C",
",",
"rup",
",",
"dists",
",",
"sites",
")",
":",
"return",
"(",
"self",
".",
"_get_magnitude_scaling_term",
"(",
"C",
",",
"rup",
".",
"mag",
")",
"+",
"self",
".",
"_get_distance_scaling_term",
"(",
"C",
","... | Returns the mean value of ground motion - noting that in this case
the style-of-faulting term is neglected | [
"Returns",
"the",
"mean",
"value",
"of",
"ground",
"motion",
"-",
"noting",
"that",
"in",
"this",
"case",
"the",
"style",
"-",
"of",
"-",
"faulting",
"term",
"is",
"neglected"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/bindi_2014.py#L290-L297 | train | 214,140 |
gem/oq-engine | openquake/calculators/event_based_risk.py | build_loss_tables | def build_loss_tables(dstore):
"""
Compute the total losses by rupture and losses by rlzi.
"""
oq = dstore['oqparam']
L = len(oq.loss_dt().names)
R = dstore['csm_info'].get_num_rlzs()
serials = dstore['ruptures']['serial']
idx_by_ser = dict(zip(serials, range(len(serials))))
tbl = numpy.zeros((len(serials), L), F32)
lbr = numpy.zeros((R, L), F32) # losses by rlz
for rec in dstore['losses_by_event'].value: # call .value for speed
idx = idx_by_ser[rec['eid'] // TWO32]
tbl[idx] += rec['loss']
lbr[rec['rlzi']] += rec['loss']
return tbl, lbr | python | def build_loss_tables(dstore):
"""
Compute the total losses by rupture and losses by rlzi.
"""
oq = dstore['oqparam']
L = len(oq.loss_dt().names)
R = dstore['csm_info'].get_num_rlzs()
serials = dstore['ruptures']['serial']
idx_by_ser = dict(zip(serials, range(len(serials))))
tbl = numpy.zeros((len(serials), L), F32)
lbr = numpy.zeros((R, L), F32) # losses by rlz
for rec in dstore['losses_by_event'].value: # call .value for speed
idx = idx_by_ser[rec['eid'] // TWO32]
tbl[idx] += rec['loss']
lbr[rec['rlzi']] += rec['loss']
return tbl, lbr | [
"def",
"build_loss_tables",
"(",
"dstore",
")",
":",
"oq",
"=",
"dstore",
"[",
"'oqparam'",
"]",
"L",
"=",
"len",
"(",
"oq",
".",
"loss_dt",
"(",
")",
".",
"names",
")",
"R",
"=",
"dstore",
"[",
"'csm_info'",
"]",
".",
"get_num_rlzs",
"(",
")",
"se... | Compute the total losses by rupture and losses by rlzi. | [
"Compute",
"the",
"total",
"losses",
"by",
"rupture",
"and",
"losses",
"by",
"rlzi",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/event_based_risk.py#L39-L54 | train | 214,141 |
gem/oq-engine | openquake/engine/tools/make_html_report.py | html | def html(header_rows):
"""
Convert a list of tuples describing a table into a HTML string
"""
name = 'table%d' % next(tablecounter)
return HtmlTable([map(str, row) for row in header_rows], name).render() | python | def html(header_rows):
"""
Convert a list of tuples describing a table into a HTML string
"""
name = 'table%d' % next(tablecounter)
return HtmlTable([map(str, row) for row in header_rows], name).render() | [
"def",
"html",
"(",
"header_rows",
")",
":",
"name",
"=",
"'table%d'",
"%",
"next",
"(",
"tablecounter",
")",
"return",
"HtmlTable",
"(",
"[",
"map",
"(",
"str",
",",
"row",
")",
"for",
"row",
"in",
"header_rows",
"]",
",",
"name",
")",
".",
"render"... | Convert a list of tuples describing a table into a HTML string | [
"Convert",
"a",
"list",
"of",
"tuples",
"describing",
"a",
"table",
"into",
"a",
"HTML",
"string"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/engine/tools/make_html_report.py#L34-L39 | train | 214,142 |
gem/oq-engine | openquake/engine/tools/make_html_report.py | make_tabs | def make_tabs(tag_ids, tag_status, tag_contents):
"""
Return a HTML string containing all the tabs we want to display
"""
templ = '''
<div id="tabs">
<ul>
%s
</ul>
%s
</div>'''
lis = []
contents = []
for i, (tag_id, status, tag_content) in enumerate(
zip(tag_ids, tag_status, tag_contents), 1):
mark = '.' if status == 'complete' else '!'
lis.append('<li><a href="#tabs-%d">%s%s</a></li>' % (i, tag_id, mark))
contents.append('<div id="tabs-%d">%s</div>' % (
i, tag_content))
return templ % ('\n'.join(lis), '\n'.join(contents)) | python | def make_tabs(tag_ids, tag_status, tag_contents):
"""
Return a HTML string containing all the tabs we want to display
"""
templ = '''
<div id="tabs">
<ul>
%s
</ul>
%s
</div>'''
lis = []
contents = []
for i, (tag_id, status, tag_content) in enumerate(
zip(tag_ids, tag_status, tag_contents), 1):
mark = '.' if status == 'complete' else '!'
lis.append('<li><a href="#tabs-%d">%s%s</a></li>' % (i, tag_id, mark))
contents.append('<div id="tabs-%d">%s</div>' % (
i, tag_content))
return templ % ('\n'.join(lis), '\n'.join(contents)) | [
"def",
"make_tabs",
"(",
"tag_ids",
",",
"tag_status",
",",
"tag_contents",
")",
":",
"templ",
"=",
"'''\n<div id=\"tabs\">\n<ul>\n%s\n</ul>\n%s\n</div>'''",
"lis",
"=",
"[",
"]",
"contents",
"=",
"[",
"]",
"for",
"i",
",",
"(",
"tag_id",
",",
"status",
",",
... | Return a HTML string containing all the tabs we want to display | [
"Return",
"a",
"HTML",
"string",
"containing",
"all",
"the",
"tabs",
"we",
"want",
"to",
"display"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/engine/tools/make_html_report.py#L125-L144 | train | 214,143 |
gem/oq-engine | openquake/engine/tools/make_html_report.py | make_report | def make_report(isodate='today'):
"""
Build a HTML report with the computations performed at the given isodate.
Return the name of the report, which is saved in the current directory.
"""
if isodate == 'today':
isodate = date.today()
else:
isodate = date(*time.strptime(isodate, '%Y-%m-%d')[:3])
isodate1 = isodate + timedelta(1) # +1 day
tag_ids = []
tag_status = []
tag_contents = []
# the fetcher returns an header which is stripped with [1:]
jobs = dbcmd(
'fetch', ALL_JOBS, isodate.isoformat(), isodate1.isoformat())
page = '<h2>%d job(s) finished before midnight of %s</h2>' % (
len(jobs), isodate)
for job_id, user, status, ds_calc in jobs:
tag_ids.append(job_id)
tag_status.append(status)
[stats] = dbcmd('fetch', JOB_STATS, job_id)
(job_id, user, start_time, stop_time, status, duration) = stats
try:
ds = read(job_id, datadir=os.path.dirname(ds_calc))
txt = view_fullreport('fullreport', ds)
report = html_parts(txt)
except Exception as exc:
report = dict(
html_title='Could not generate report: %s' % cgi.escape(
str(exc), quote=True),
fragment='')
page = report['html_title']
page += html([stats._fields, stats])
page += report['fragment']
tag_contents.append(page)
page = make_tabs(tag_ids, tag_status, tag_contents) + (
'Report last updated: %s' % datetime.now())
fname = 'jobs-%s.html' % isodate
with open(fname, 'w') as f:
f.write(PAGE_TEMPLATE % page)
return fname | python | def make_report(isodate='today'):
"""
Build a HTML report with the computations performed at the given isodate.
Return the name of the report, which is saved in the current directory.
"""
if isodate == 'today':
isodate = date.today()
else:
isodate = date(*time.strptime(isodate, '%Y-%m-%d')[:3])
isodate1 = isodate + timedelta(1) # +1 day
tag_ids = []
tag_status = []
tag_contents = []
# the fetcher returns an header which is stripped with [1:]
jobs = dbcmd(
'fetch', ALL_JOBS, isodate.isoformat(), isodate1.isoformat())
page = '<h2>%d job(s) finished before midnight of %s</h2>' % (
len(jobs), isodate)
for job_id, user, status, ds_calc in jobs:
tag_ids.append(job_id)
tag_status.append(status)
[stats] = dbcmd('fetch', JOB_STATS, job_id)
(job_id, user, start_time, stop_time, status, duration) = stats
try:
ds = read(job_id, datadir=os.path.dirname(ds_calc))
txt = view_fullreport('fullreport', ds)
report = html_parts(txt)
except Exception as exc:
report = dict(
html_title='Could not generate report: %s' % cgi.escape(
str(exc), quote=True),
fragment='')
page = report['html_title']
page += html([stats._fields, stats])
page += report['fragment']
tag_contents.append(page)
page = make_tabs(tag_ids, tag_status, tag_contents) + (
'Report last updated: %s' % datetime.now())
fname = 'jobs-%s.html' % isodate
with open(fname, 'w') as f:
f.write(PAGE_TEMPLATE % page)
return fname | [
"def",
"make_report",
"(",
"isodate",
"=",
"'today'",
")",
":",
"if",
"isodate",
"==",
"'today'",
":",
"isodate",
"=",
"date",
".",
"today",
"(",
")",
"else",
":",
"isodate",
"=",
"date",
"(",
"*",
"time",
".",
"strptime",
"(",
"isodate",
",",
"'%Y-%... | Build a HTML report with the computations performed at the given isodate.
Return the name of the report, which is saved in the current directory. | [
"Build",
"a",
"HTML",
"report",
"with",
"the",
"computations",
"performed",
"at",
"the",
"given",
"isodate",
".",
"Return",
"the",
"name",
"of",
"the",
"report",
"which",
"is",
"saved",
"in",
"the",
"current",
"directory",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/engine/tools/make_html_report.py#L147-L191 | train | 214,144 |
gem/oq-engine | openquake/calculators/scenario_risk.py | scenario_risk | def scenario_risk(riskinputs, riskmodel, param, monitor):
"""
Core function for a scenario computation.
:param riskinput:
a of :class:`openquake.risklib.riskinput.RiskInput` object
:param riskmodel:
a :class:`openquake.risklib.riskinput.CompositeRiskModel` instance
:param param:
dictionary of extra parameters
:param monitor:
:class:`openquake.baselib.performance.Monitor` instance
:returns:
a dictionary {
'agg': array of shape (E, L, R, 2),
'avg': list of tuples (lt_idx, rlz_idx, asset_ordinal, statistics)
}
where E is the number of simulated events, L the number of loss types,
R the number of realizations and statistics is an array of shape
(n, R, 4), with n the number of assets in the current riskinput object
"""
E = param['E']
L = len(riskmodel.loss_types)
result = dict(agg=numpy.zeros((E, L), F32), avg=[],
all_losses=AccumDict(accum={}))
for ri in riskinputs:
for out in riskmodel.gen_outputs(ri, monitor, param['epspath']):
r = out.rlzi
weight = param['weights'][r]
slc = param['event_slice'](r)
for l, loss_type in enumerate(riskmodel.loss_types):
losses = out[loss_type]
if numpy.product(losses.shape) == 0: # happens for all NaNs
continue
stats = numpy.zeros(len(ri.assets), stat_dt) # mean, stddev
for a, asset in enumerate(ri.assets):
stats['mean'][a] = losses[a].mean()
stats['stddev'][a] = losses[a].std(ddof=1)
result['avg'].append((l, r, asset['ordinal'], stats[a]))
agglosses = losses.sum(axis=0) # shape num_gmfs
result['agg'][slc, l] += agglosses * weight
if param['asset_loss_table']:
aids = ri.assets['ordinal']
result['all_losses'][l, r] += AccumDict(zip(aids, losses))
return result | python | def scenario_risk(riskinputs, riskmodel, param, monitor):
"""
Core function for a scenario computation.
:param riskinput:
a of :class:`openquake.risklib.riskinput.RiskInput` object
:param riskmodel:
a :class:`openquake.risklib.riskinput.CompositeRiskModel` instance
:param param:
dictionary of extra parameters
:param monitor:
:class:`openquake.baselib.performance.Monitor` instance
:returns:
a dictionary {
'agg': array of shape (E, L, R, 2),
'avg': list of tuples (lt_idx, rlz_idx, asset_ordinal, statistics)
}
where E is the number of simulated events, L the number of loss types,
R the number of realizations and statistics is an array of shape
(n, R, 4), with n the number of assets in the current riskinput object
"""
E = param['E']
L = len(riskmodel.loss_types)
result = dict(agg=numpy.zeros((E, L), F32), avg=[],
all_losses=AccumDict(accum={}))
for ri in riskinputs:
for out in riskmodel.gen_outputs(ri, monitor, param['epspath']):
r = out.rlzi
weight = param['weights'][r]
slc = param['event_slice'](r)
for l, loss_type in enumerate(riskmodel.loss_types):
losses = out[loss_type]
if numpy.product(losses.shape) == 0: # happens for all NaNs
continue
stats = numpy.zeros(len(ri.assets), stat_dt) # mean, stddev
for a, asset in enumerate(ri.assets):
stats['mean'][a] = losses[a].mean()
stats['stddev'][a] = losses[a].std(ddof=1)
result['avg'].append((l, r, asset['ordinal'], stats[a]))
agglosses = losses.sum(axis=0) # shape num_gmfs
result['agg'][slc, l] += agglosses * weight
if param['asset_loss_table']:
aids = ri.assets['ordinal']
result['all_losses'][l, r] += AccumDict(zip(aids, losses))
return result | [
"def",
"scenario_risk",
"(",
"riskinputs",
",",
"riskmodel",
",",
"param",
",",
"monitor",
")",
":",
"E",
"=",
"param",
"[",
"'E'",
"]",
"L",
"=",
"len",
"(",
"riskmodel",
".",
"loss_types",
")",
"result",
"=",
"dict",
"(",
"agg",
"=",
"numpy",
".",
... | Core function for a scenario computation.
:param riskinput:
a of :class:`openquake.risklib.riskinput.RiskInput` object
:param riskmodel:
a :class:`openquake.risklib.riskinput.CompositeRiskModel` instance
:param param:
dictionary of extra parameters
:param monitor:
:class:`openquake.baselib.performance.Monitor` instance
:returns:
a dictionary {
'agg': array of shape (E, L, R, 2),
'avg': list of tuples (lt_idx, rlz_idx, asset_ordinal, statistics)
}
where E is the number of simulated events, L the number of loss types,
R the number of realizations and statistics is an array of shape
(n, R, 4), with n the number of assets in the current riskinput object | [
"Core",
"function",
"for",
"a",
"scenario",
"computation",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/scenario_risk.py#L38-L82 | train | 214,145 |
gem/oq-engine | openquake/hmtk/seismicity/selector.py | _check_depth_limits | def _check_depth_limits(input_dict):
'''Returns the default upper and lower depth values if not in dictionary
:param input_dict:
Dictionary corresponding to the kwargs dictionary of calling function
:returns:
'upper_depth': Upper seismogenic depth (float)
'lower_depth': Lower seismogenic depth (float)
'''
if ('upper_depth' in input_dict.keys()) and input_dict['upper_depth']:
if input_dict['upper_depth'] < 0.:
raise ValueError('Upper seismogenic depth must be positive')
else:
upper_depth = input_dict['upper_depth']
else:
upper_depth = 0.0
if ('lower_depth' in input_dict.keys()) and input_dict['lower_depth']:
if input_dict['lower_depth'] < upper_depth:
raise ValueError('Lower depth must take a greater value than'
' upper depth!')
else:
lower_depth = input_dict['lower_depth']
else:
lower_depth = np.inf
return upper_depth, lower_depth | python | def _check_depth_limits(input_dict):
'''Returns the default upper and lower depth values if not in dictionary
:param input_dict:
Dictionary corresponding to the kwargs dictionary of calling function
:returns:
'upper_depth': Upper seismogenic depth (float)
'lower_depth': Lower seismogenic depth (float)
'''
if ('upper_depth' in input_dict.keys()) and input_dict['upper_depth']:
if input_dict['upper_depth'] < 0.:
raise ValueError('Upper seismogenic depth must be positive')
else:
upper_depth = input_dict['upper_depth']
else:
upper_depth = 0.0
if ('lower_depth' in input_dict.keys()) and input_dict['lower_depth']:
if input_dict['lower_depth'] < upper_depth:
raise ValueError('Lower depth must take a greater value than'
' upper depth!')
else:
lower_depth = input_dict['lower_depth']
else:
lower_depth = np.inf
return upper_depth, lower_depth | [
"def",
"_check_depth_limits",
"(",
"input_dict",
")",
":",
"if",
"(",
"'upper_depth'",
"in",
"input_dict",
".",
"keys",
"(",
")",
")",
"and",
"input_dict",
"[",
"'upper_depth'",
"]",
":",
"if",
"input_dict",
"[",
"'upper_depth'",
"]",
"<",
"0.",
":",
"rais... | Returns the default upper and lower depth values if not in dictionary
:param input_dict:
Dictionary corresponding to the kwargs dictionary of calling function
:returns:
'upper_depth': Upper seismogenic depth (float)
'lower_depth': Lower seismogenic depth (float) | [
"Returns",
"the",
"default",
"upper",
"and",
"lower",
"depth",
"values",
"if",
"not",
"in",
"dictionary"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/selector.py#L63-L89 | train | 214,146 |
gem/oq-engine | openquake/hmtk/seismicity/selector.py | _get_decimal_from_datetime | def _get_decimal_from_datetime(time):
'''
As the decimal time function requires inputs in the form of numpy
arrays need to convert each value in the datetime object to a single
numpy array
'''
# Get decimal seconds from seconds + microseconds
temp_seconds = np.float(time.second) + (np.float(time.microsecond) / 1.0E6)
return decimal_time(np.array([time.year], dtype=int),
np.array([time.month], dtype=int),
np.array([time.day], dtype=int),
np.array([time.hour], dtype=int),
np.array([time.minute], dtype=int),
np.array([temp_seconds], dtype=int)) | python | def _get_decimal_from_datetime(time):
'''
As the decimal time function requires inputs in the form of numpy
arrays need to convert each value in the datetime object to a single
numpy array
'''
# Get decimal seconds from seconds + microseconds
temp_seconds = np.float(time.second) + (np.float(time.microsecond) / 1.0E6)
return decimal_time(np.array([time.year], dtype=int),
np.array([time.month], dtype=int),
np.array([time.day], dtype=int),
np.array([time.hour], dtype=int),
np.array([time.minute], dtype=int),
np.array([temp_seconds], dtype=int)) | [
"def",
"_get_decimal_from_datetime",
"(",
"time",
")",
":",
"# Get decimal seconds from seconds + microseconds",
"temp_seconds",
"=",
"np",
".",
"float",
"(",
"time",
".",
"second",
")",
"+",
"(",
"np",
".",
"float",
"(",
"time",
".",
"microsecond",
")",
"/",
... | As the decimal time function requires inputs in the form of numpy
arrays need to convert each value in the datetime object to a single
numpy array | [
"As",
"the",
"decimal",
"time",
"function",
"requires",
"inputs",
"in",
"the",
"form",
"of",
"numpy",
"arrays",
"need",
"to",
"convert",
"each",
"value",
"in",
"the",
"datetime",
"object",
"to",
"a",
"single",
"numpy",
"array"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/selector.py#L92-L106 | train | 214,147 |
gem/oq-engine | openquake/hmtk/seismicity/selector.py | CatalogueSelector.select_catalogue | def select_catalogue(self, valid_id):
'''
Method to post-process the catalogue based on the selection options
:param numpy.ndarray valid_id:
Boolean vector indicating whether each event is selected (True)
or not (False)
:returns:
Catalogue of selected events as instance of
openquake.hmtk.seismicity.catalogue.Catalogue class
'''
if not np.any(valid_id):
# No events selected - create clean instance of class
output = Catalogue()
output.processes = self.catalogue.processes
elif np.all(valid_id):
if self.copycat:
output = deepcopy(self.catalogue)
else:
output = self.catalogue
else:
if self.copycat:
output = deepcopy(self.catalogue)
else:
output = self.catalogue
output.purge_catalogue(valid_id)
return output | python | def select_catalogue(self, valid_id):
'''
Method to post-process the catalogue based on the selection options
:param numpy.ndarray valid_id:
Boolean vector indicating whether each event is selected (True)
or not (False)
:returns:
Catalogue of selected events as instance of
openquake.hmtk.seismicity.catalogue.Catalogue class
'''
if not np.any(valid_id):
# No events selected - create clean instance of class
output = Catalogue()
output.processes = self.catalogue.processes
elif np.all(valid_id):
if self.copycat:
output = deepcopy(self.catalogue)
else:
output = self.catalogue
else:
if self.copycat:
output = deepcopy(self.catalogue)
else:
output = self.catalogue
output.purge_catalogue(valid_id)
return output | [
"def",
"select_catalogue",
"(",
"self",
",",
"valid_id",
")",
":",
"if",
"not",
"np",
".",
"any",
"(",
"valid_id",
")",
":",
"# No events selected - create clean instance of class",
"output",
"=",
"Catalogue",
"(",
")",
"output",
".",
"processes",
"=",
"self",
... | Method to post-process the catalogue based on the selection options
:param numpy.ndarray valid_id:
Boolean vector indicating whether each event is selected (True)
or not (False)
:returns:
Catalogue of selected events as instance of
openquake.hmtk.seismicity.catalogue.Catalogue class | [
"Method",
"to",
"post",
"-",
"process",
"the",
"catalogue",
"based",
"on",
"the",
"selection",
"options"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/selector.py#L133-L161 | train | 214,148 |
gem/oq-engine | openquake/hmtk/seismicity/selector.py | CatalogueSelector.within_polygon | def within_polygon(self, polygon, distance=None, **kwargs):
'''
Select earthquakes within polygon
:param polygon:
Centre point as instance of nhlib.geo.polygon.Polygon class
:param float distance:
Buffer distance (km) (can take negative values)
:returns:
Instance of :class:`openquake.hmtk.seismicity.catalogue.Catalogue`
containing only selected events
'''
if distance:
# If a distance is specified then dilate the polyon by distance
zone_polygon = polygon.dilate(distance)
else:
zone_polygon = polygon
# Make valid all events inside depth range
upper_depth, lower_depth = _check_depth_limits(kwargs)
valid_depth = np.logical_and(
self.catalogue.data['depth'] >= upper_depth,
self.catalogue.data['depth'] < lower_depth)
# Events outside polygon returned to invalid assignment
catalogue_mesh = Mesh(self.catalogue.data['longitude'],
self.catalogue.data['latitude'],
self.catalogue.data['depth'])
valid_id = np.logical_and(valid_depth,
zone_polygon.intersects(catalogue_mesh))
return self.select_catalogue(valid_id) | python | def within_polygon(self, polygon, distance=None, **kwargs):
'''
Select earthquakes within polygon
:param polygon:
Centre point as instance of nhlib.geo.polygon.Polygon class
:param float distance:
Buffer distance (km) (can take negative values)
:returns:
Instance of :class:`openquake.hmtk.seismicity.catalogue.Catalogue`
containing only selected events
'''
if distance:
# If a distance is specified then dilate the polyon by distance
zone_polygon = polygon.dilate(distance)
else:
zone_polygon = polygon
# Make valid all events inside depth range
upper_depth, lower_depth = _check_depth_limits(kwargs)
valid_depth = np.logical_and(
self.catalogue.data['depth'] >= upper_depth,
self.catalogue.data['depth'] < lower_depth)
# Events outside polygon returned to invalid assignment
catalogue_mesh = Mesh(self.catalogue.data['longitude'],
self.catalogue.data['latitude'],
self.catalogue.data['depth'])
valid_id = np.logical_and(valid_depth,
zone_polygon.intersects(catalogue_mesh))
return self.select_catalogue(valid_id) | [
"def",
"within_polygon",
"(",
"self",
",",
"polygon",
",",
"distance",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"distance",
":",
"# If a distance is specified then dilate the polyon by distance",
"zone_polygon",
"=",
"polygon",
".",
"dilate",
"(",
"di... | Select earthquakes within polygon
:param polygon:
Centre point as instance of nhlib.geo.polygon.Polygon class
:param float distance:
Buffer distance (km) (can take negative values)
:returns:
Instance of :class:`openquake.hmtk.seismicity.catalogue.Catalogue`
containing only selected events | [
"Select",
"earthquakes",
"within",
"polygon"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/selector.py#L163-L197 | train | 214,149 |
gem/oq-engine | openquake/hmtk/seismicity/selector.py | CatalogueSelector.circular_distance_from_point | def circular_distance_from_point(self, point, distance, **kwargs):
'''
Select earthquakes within a distance from a Point
:param point:
Centre point as instance of nhlib.geo.point.Point class
:param float distance:
Distance (km)
:returns:
Instance of :class:`openquake.hmtk.seismicity.catalogue.Catalogue`
containing only selected events
'''
if kwargs['distance_type'] is 'epicentral':
locations = Mesh(
self.catalogue.data['longitude'],
self.catalogue.data['latitude'],
np.zeros(len(self.catalogue.data['longitude']), dtype=float))
point = Point(point.longitude, point.latitude, 0.0)
else:
locations = self.catalogue.hypocentres_as_mesh()
is_close = point.closer_than(locations, distance)
return self.select_catalogue(is_close) | python | def circular_distance_from_point(self, point, distance, **kwargs):
'''
Select earthquakes within a distance from a Point
:param point:
Centre point as instance of nhlib.geo.point.Point class
:param float distance:
Distance (km)
:returns:
Instance of :class:`openquake.hmtk.seismicity.catalogue.Catalogue`
containing only selected events
'''
if kwargs['distance_type'] is 'epicentral':
locations = Mesh(
self.catalogue.data['longitude'],
self.catalogue.data['latitude'],
np.zeros(len(self.catalogue.data['longitude']), dtype=float))
point = Point(point.longitude, point.latitude, 0.0)
else:
locations = self.catalogue.hypocentres_as_mesh()
is_close = point.closer_than(locations, distance)
return self.select_catalogue(is_close) | [
"def",
"circular_distance_from_point",
"(",
"self",
",",
"point",
",",
"distance",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"kwargs",
"[",
"'distance_type'",
"]",
"is",
"'epicentral'",
":",
"locations",
"=",
"Mesh",
"(",
"self",
".",
"catalogue",
".",
"dat... | Select earthquakes within a distance from a Point
:param point:
Centre point as instance of nhlib.geo.point.Point class
:param float distance:
Distance (km)
:returns:
Instance of :class:`openquake.hmtk.seismicity.catalogue.Catalogue`
containing only selected events | [
"Select",
"earthquakes",
"within",
"a",
"distance",
"from",
"a",
"Point"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/selector.py#L199-L225 | train | 214,150 |
def cartesian_square_centred_on_point(self, point, distance, **kwargs):
    '''
    Select earthquakes from within a square centered on a point

    :param point:
        Centre point as instance of nhlib.geo.point.Point class
    :param distance:
        Distance (km) from the centre to each side of the square
    :returns:
        Instance of :class:`openquake.hmtk.seismicity.catalogue.Catalogue`
        class containing only selected events
    '''
    point_surface = Point(point.longitude, point.latitude, 0.)
    # Build the bounding box by projecting the distance N, E, S and W
    north_point = point_surface.point_at(distance, 0., 0.)
    east_point = point_surface.point_at(distance, 0., 90.)
    south_point = point_surface.point_at(distance, 0., 180.)
    west_point = point_surface.point_at(distance, 0., 270.)
    # BUG FIX: the original called np.logical_and with three positional
    # arguments, so the third array was interpreted as the ufunc's
    # ``out`` parameter and that condition was silently overwritten
    # instead of being AND-ed in. Use logical_and.reduce instead.
    is_surface = np.logical_and.reduce([
        self.catalogue.data['longitude'] >= west_point.longitude,
        self.catalogue.data['longitude'] < east_point.longitude,
        self.catalogue.data['latitude'] >= south_point.latitude,
        self.catalogue.data['latitude'] < north_point.latitude])
    upper_depth, lower_depth = _check_depth_limits(kwargs)
    is_valid = np.logical_and.reduce([
        is_surface,
        self.catalogue.data['depth'] >= upper_depth,
        self.catalogue.data['depth'] < lower_depth])
    return self.select_catalogue(is_valid)
"def",
"cartesian_square_centred_on_point",
"(",
"self",
",",
"point",
",",
"distance",
",",
"*",
"*",
"kwargs",
")",
":",
"point_surface",
"=",
"Point",
"(",
"point",
".",
"longitude",
",",
"point",
".",
"latitude",
",",
"0.",
")",
"# As distance is",
"nort... | Select earthquakes from within a square centered on a point
:param point:
Centre point as instance of nhlib.geo.point.Point class
:param distance:
Distance (km)
:returns:
Instance of :class:`openquake.hmtk.seismicity.catalogue.Catalogue`
class containing only selected events | [
"Select",
"earthquakes",
"from",
"within",
"a",
"square",
"centered",
"on",
"a",
"point"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/selector.py#L227-L261 | train | 214,151 |
def within_joyner_boore_distance(self, surface, distance, **kwargs):
    '''
    Select events within a Joyner-Boore distance of a fault

    :param surface:
        Fault surface as instance of
        nhlib.geo.surface.base.SimpleFaultSurface or as instance of
        nhlib.geo.surface.ComplexFaultSurface
    :param float distance:
        Rupture distance (km)
    :returns:
        Instance of :class:`openquake.hmtk.seismicity.catalogue.Catalogue`
        containing only selected events
    '''
    upper_depth, lower_depth = _check_depth_limits(kwargs)
    # Joyner-Boore (surface projection) distance to every hypocentre
    hypocentres = self.catalogue.hypocentres_as_mesh()
    rjb = surface.get_joyner_boore_distance(hypocentres)
    depths = self.catalogue.data['depth']
    in_depth_band = np.logical_and(depths >= upper_depth,
                                   depths < lower_depth)
    selected = np.logical_and(rjb <= distance, in_depth_band)
    return self.select_catalogue(selected)
"def",
"within_joyner_boore_distance",
"(",
"self",
",",
"surface",
",",
"distance",
",",
"*",
"*",
"kwargs",
")",
":",
"upper_depth",
",",
"lower_depth",
"=",
"_check_depth_limits",
"(",
"kwargs",
")",
"rjb",
"=",
"surface",
".",
"get_joyner_boore_distance",
"(... | Select events within a Joyner-Boore distance of a fault
:param surface:
Fault surface as instance of
nhlib.geo.surface.base.SimpleFaultSurface or as instance of
nhlib.geo.surface.ComplexFaultSurface
:param float distance:
Rupture distance (km)
:returns:
Instance of :class:`openquake.hmtk.seismicity.catalogue.Catalogue`
containing only selected events | [
"Select",
"events",
"within",
"a",
"Joyner",
"-",
"Boore",
"distance",
"of",
"a",
"fault"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/selector.py#L263-L288 | train | 214,152 |
def within_rupture_distance(self, surface, distance, **kwargs):
    '''
    Select events within a rupture distance from a fault surface

    :param surface:
        Fault surface as instance of nhlib.geo.surface.base.BaseSurface
    :param float distance:
        Rupture distance (km)
    :returns:
        Instance of :class:`openquake.hmtk.seismicity.catalogue.Catalogue`
        containing only selected events
    '''
    # Optional 'upper_depth' / 'lower_depth' limits from kwargs
    upper_depth, lower_depth = _check_depth_limits(kwargs)
    # Minimum (rupture) distance from the surface to each hypocentre
    rupture_dist = surface.get_min_distance(
        self.catalogue.hypocentres_as_mesh())
    depths = self.catalogue.data['depth']
    in_depth_band = np.logical_and(depths >= upper_depth,
                                   depths < lower_depth)
    selected = np.logical_and(rupture_dist <= distance, in_depth_band)
    return self.select_catalogue(selected)
"def",
"within_rupture_distance",
"(",
"self",
",",
"surface",
",",
"distance",
",",
"*",
"*",
"kwargs",
")",
":",
"# Check for upper and lower depths",
"upper_depth",
",",
"lower_depth",
"=",
"_check_depth_limits",
"(",
"kwargs",
")",
"rrupt",
"=",
"surface",
"."... | Select events within a rupture distance from a fault surface
:param surface:
Fault surface as instance of nhlib.geo.surface.base.BaseSurface
:param float distance:
Rupture distance (km)
:returns:
Instance of :class:`openquake.hmtk.seismicity.catalogue.Catalogue`
containing only selected events | [
"Select",
"events",
"within",
"a",
"rupture",
"distance",
"from",
"a",
"fault",
"surface"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/selector.py#L290-L313 | train | 214,153 |
def within_time_period(self, start_time=None, end_time=None):
    '''
    Select earthquakes occurring within a given time period

    :param start_time:
        Earliest time (as datetime.datetime object)
    :param end_time:
        Latest time (as datetime.datetime object)
    :returns:
        Instance of :class:`openquake.hmtk.seismicity.catalogue.Catalogue`
        containing only selected events
    '''
    if not start_time:
        if not end_time:
            # No times input, therefore skip everything and return catalog
            return self.catalogue
        else:
            # Only an upper limit given: start from the earliest year
            start_time = np.min(self.catalogue.data['year'])
    else:
        start_time = _get_decimal_from_datetime(start_time)
    if not end_time:
        # No upper limit given: use the current time
        end_time = _get_decimal_from_datetime(datetime.now())
    else:
        end_time = _get_decimal_from_datetime(end_time)
    # FIX: the original computed get_decimal_time() twice, discarding
    # the first (whole-catalogue) result; compute it once, here.
    time_value = self.catalogue.get_decimal_time()
    is_valid = np.logical_and(time_value >= start_time,
                              time_value < end_time)
    return self.select_catalogue(is_valid)
"def",
"within_time_period",
"(",
"self",
",",
"start_time",
"=",
"None",
",",
"end_time",
"=",
"None",
")",
":",
"time_value",
"=",
"self",
".",
"catalogue",
".",
"get_decimal_time",
"(",
")",
"if",
"not",
"start_time",
":",
"if",
"not",
"end_time",
":",
... | Select earthquakes occurring within a given time period
:param start_time:
Earliest time (as datetime.datetime object)
:param end_time:
Latest time (as datetime.datetime object)
:returns:
Instance of :class:`openquake.hmtk.seismicity.catalogue.Catalogue`
containing only selected events | [
"Select",
"earthquakes",
"occurring",
"within",
"a",
"given",
"time",
"period"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/selector.py#L315-L350 | train | 214,154 |
def within_depth_range(self, lower_depth=None, upper_depth=None):
    '''
    Selects events within a specified depth range

    :param float lower_depth:
        Lower (deeper) depth limit (km) for consideration
    :param float upper_depth:
        Upper (shallower) depth limit (km) for consideration
    :returns:
        Instance of :class:`openquake.hmtk.seismicity.catalogue.Catalogue`
        containing only selected events
    '''
    if lower_depth is None and upper_depth is None:
        # No limiting depths defined - so return entire catalogue!
        return self.catalogue
    # FIX: test against None explicitly - the original used truthiness,
    # so an explicit limit of 0.0 was silently replaced by the
    # unbounded default (lower_depth=0.0 became infinity).
    if lower_depth is None:
        lower_depth = np.inf
    if upper_depth is None:
        upper_depth = 0.0
    is_valid = np.logical_and(self.catalogue.data['depth'] >= upper_depth,
                              self.catalogue.data['depth'] < lower_depth)
    return self.select_catalogue(is_valid)
"def",
"within_depth_range",
"(",
"self",
",",
"lower_depth",
"=",
"None",
",",
"upper_depth",
"=",
"None",
")",
":",
"if",
"not",
"lower_depth",
":",
"if",
"not",
"upper_depth",
":",
"# No limiting depths defined - so return entire catalogue!",
"return",
"self",
".... | Selects events within a specified depth range
:param float lower_depth:
Lower depth for consideration
:param float upper_depth:
Upper depth for consideration
:returns:
Instance of :class:`openquake.hmtk.seismicity.catalogue.Catalogue`
containing only selected events | [
"Selects",
"events",
"within",
"a",
"specified",
"depth",
"range"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/selector.py#L352-L378 | train | 214,155 |
def create_cluster_set(self, vcl):
    """
    For a given catalogue and list of cluster IDs this function splits
    the catalogue into a dictionary containing an individual catalogue
    of events within each cluster

    :param numpy.ndarray vcl:
        Cluster ID list
    :returns:
        Dictionary of instances of the :class:
        openquake.hmtk.seismicity.catalogue.Catalogue, where each instance
        if the catalogue of each cluster
    """
    cluster_set = {}
    # One sub-catalogue per cluster ID, from 0 up to the largest ID
    for cluster_id in range(np.max(vcl) + 1):
        event_indices = np.where(vcl == cluster_id)[0]
        cluster_catalogue = deepcopy(self.catalogue)
        cluster_catalogue.select_catalogue_events(event_indices)
        cluster_set[cluster_id] = cluster_catalogue
    return cluster_set
"def",
"create_cluster_set",
"(",
"self",
",",
"vcl",
")",
":",
"num_clust",
"=",
"np",
".",
"max",
"(",
"vcl",
")",
"cluster_set",
"=",
"[",
"]",
"for",
"clid",
"in",
"range",
"(",
"0",
",",
"num_clust",
"+",
"1",
")",
":",
"idx",
"=",
"np",
"."... | For a given catalogue and list of cluster IDs this function splits
the catalogue into a dictionary containing an individual catalogue
of events within each cluster
:param numpy.ndarray vcl:
Cluster ID list
:returns:
Dictionary of instances of the :class:
openquake.hmtk.seismicity.catalogue.Catalogue, where each instance
if the catalogue of each cluster | [
"For",
"a",
"given",
"catalogue",
"and",
"list",
"of",
"cluster",
"IDs",
"this",
"function",
"splits",
"the",
"catalogue",
"into",
"a",
"dictionary",
"containing",
"an",
"individual",
"catalogue",
"of",
"events",
"within",
"each",
"cluster"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/selector.py#L408-L428 | train | 214,156 |
def within_bounding_box(self, limits):
    """
    Selects the earthquakes within a bounding box.

    :parameter limits:
        A list or a numpy array with four elements in the following order:
            - min x (longitude)
            - min y (latitude)
            - max x (longitude)
            - max y (latitude)
    :returns:
        Returns a :class:htmk.seismicity.catalogue.Catalogue` instance
    """
    min_lon, min_lat, max_lon, max_lat = (
        limits[0], limits[1], limits[2], limits[3])
    lons = self.catalogue.data['longitude']
    lats = self.catalogue.data['latitude']
    # Event is kept only when both coordinates fall inside the box
    inside = np.logical_and(
        lons >= min_lon,
        np.logical_and(lons <= max_lon,
                       np.logical_and(lats >= min_lat, lats <= max_lat)))
    return self.select_catalogue(inside)
"def",
"within_bounding_box",
"(",
"self",
",",
"limits",
")",
":",
"is_valid",
"=",
"np",
".",
"logical_and",
"(",
"self",
".",
"catalogue",
".",
"data",
"[",
"'longitude'",
"]",
">=",
"limits",
"[",
"0",
"]",
",",
"np",
".",
"logical_and",
"(",
"self... | Selects the earthquakes within a bounding box.
:parameter limits:
A list or a numpy array with four elements in the following order:
- min x (longitude)
- min y (latitude)
- max x (longitude)
- max y (latitude)
:returns:
Returns a :class:htmk.seismicity.catalogue.Catalogue` instance | [
"Selects",
"the",
"earthquakes",
"within",
"a",
"bounding",
"box",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/selector.py#L430-L449 | train | 214,157 |
def get_calc_ids(datadir=None):
    """
    Extract the available calculation IDs from the datadir, in order.
    """
    datadir = datadir or get_datadir()
    if not os.path.exists(datadir):
        return []
    # Collect the numeric ID (second regex group) of every file whose
    # name matches the calculation-file pattern; deduplicate and sort.
    matches = (re.match(CALC_REGEX, fname) for fname in os.listdir(datadir))
    return sorted({int(mo.group(2)) for mo in matches if mo})
"def",
"get_calc_ids",
"(",
"datadir",
"=",
"None",
")",
":",
"datadir",
"=",
"datadir",
"or",
"get_datadir",
"(",
")",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"datadir",
")",
":",
"return",
"[",
"]",
"calc_ids",
"=",
"set",
"(",
")",
"... | Extract the available calculation IDs from the datadir, in order. | [
"Extract",
"the",
"available",
"calculation",
"IDs",
"from",
"the",
"datadir",
"in",
"order",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/datastore.py#L46-L58 | train | 214,158 |
def get_last_calc_id(datadir=None):
    """
    Extract the latest calculation ID from the given directory.
    If none is found, return 0.
    """
    # get_calc_ids returns the IDs sorted, so the last one is the latest
    calc_ids = get_calc_ids(datadir or get_datadir())
    return calc_ids[-1] if calc_ids else 0
"def",
"get_last_calc_id",
"(",
"datadir",
"=",
"None",
")",
":",
"datadir",
"=",
"datadir",
"or",
"get_datadir",
"(",
")",
"calcs",
"=",
"get_calc_ids",
"(",
"datadir",
")",
"if",
"not",
"calcs",
":",
"return",
"0",
"return",
"calcs",
"[",
"-",
"1",
"... | Extract the latest calculation ID from the given directory.
If none is found, return 0. | [
"Extract",
"the",
"latest",
"calculation",
"ID",
"from",
"the",
"given",
"directory",
".",
"If",
"none",
"is",
"found",
"return",
"0",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/datastore.py#L61-L70 | train | 214,159 |
def open(self, mode):
    """
    Open the underlying .hdf5 file and the parent, if any
    """
    if self.hdf5 != ():  # already open, nothing to do
        return
    kwargs = {'mode': mode, 'libver': 'latest'}
    if mode == 'r':
        # single-writer/multiple-reader access for read-only mode
        kwargs['swmr'] = True
    try:
        self.hdf5 = hdf5.File(self.filename, **kwargs)
    except OSError as exc:
        # include the file name in the error for easier debugging
        raise OSError('%s in %s' % (exc, self.filename))
"def",
"open",
"(",
"self",
",",
"mode",
")",
":",
"if",
"self",
".",
"hdf5",
"==",
"(",
")",
":",
"# not already open",
"kw",
"=",
"dict",
"(",
"mode",
"=",
"mode",
",",
"libver",
"=",
"'latest'",
")",
"if",
"mode",
"==",
"'r'",
":",
"kw",
"[",
... | Open the underlying .hdf5 file and the parent, if any | [
"Open",
"the",
"underlying",
".",
"hdf5",
"file",
"and",
"the",
"parent",
"if",
"any"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/datastore.py#L184-L195 | train | 214,160 |
def create_dset(self, key, dtype, shape=(None,), compression=None,
                fillvalue=0, attrs=None):
    """
    Create a one-dimensional HDF5 dataset.

    :param key: name of the dataset
    :param dtype: dtype of the dataset (usually composite)
    :param shape: shape of the dataset, possibly extendable
    :param compression: the kind of HDF5 compression to use
    :param attrs: dictionary of attributes of the dataset
    :returns: a HDF5 dataset
    """
    # Thin delegation to the hdf5 helper, bound to this datastore's file
    dset = hdf5.create(self.hdf5, key, dtype, shape,
                       compression, fillvalue, attrs)
    return dset
"def",
"create_dset",
"(",
"self",
",",
"key",
",",
"dtype",
",",
"shape",
"=",
"(",
"None",
",",
")",
",",
"compression",
"=",
"None",
",",
"fillvalue",
"=",
"0",
",",
"attrs",
"=",
"None",
")",
":",
"return",
"hdf5",
".",
"create",
"(",
"self",
... | Create a one-dimensional HDF5 dataset.
:param key: name of the dataset
:param dtype: dtype of the dataset (usually composite)
:param shape: shape of the dataset, possibly extendable
:param compression: the kind of HDF5 compression to use
:param attrs: dictionary of attributes of the dataset
:returns: a HDF5 dataset | [
"Create",
"a",
"one",
"-",
"dimensional",
"HDF5",
"dataset",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/datastore.py#L270-L283 | train | 214,161 |
def extend(self, key, array, **attrs):
    """
    Extend the dataset associated to the given key; create it if needed

    :param key: name of the dataset
    :param array: array to store
    :param attrs: a dictionary of attributes
    """
    try:
        dset = self.hdf5[key]
    except KeyError:
        # First extension: create an extendable dataset with the
        # trailing shape of the array and an unbounded first axis
        new_shape = (None,) + array.shape[1:]
        dset = hdf5.create(self.hdf5, key, array.dtype, shape=new_shape)
    hdf5.extend(dset, array)
    for attr_name, attr_value in attrs.items():
        dset.attrs[attr_name] = attr_value
    return dset
"def",
"extend",
"(",
"self",
",",
"key",
",",
"array",
",",
"*",
"*",
"attrs",
")",
":",
"try",
":",
"dset",
"=",
"self",
".",
"hdf5",
"[",
"key",
"]",
"except",
"KeyError",
":",
"dset",
"=",
"hdf5",
".",
"create",
"(",
"self",
".",
"hdf5",
",... | Extend the dataset associated to the given key; create it if needed
:param key: name of the dataset
:param array: array to store
:param attrs: a dictionary of attributes | [
"Extend",
"the",
"dataset",
"associated",
"to",
"the",
"given",
"key",
";",
"create",
"it",
"if",
"needed"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/datastore.py#L285-L301 | train | 214,162 |
def save(self, key, kw):
    """
    Update the object associated to `key` with the `kw` dictionary;
    works for LiteralAttrs objects and automatically flushes.
    """
    # Reuse the stored object when present, otherwise start fresh
    obj = self[key] if key in self else hdf5.LiteralAttrs()
    vars(obj).update(kw)
    self[key] = obj
    self.flush()
"def",
"save",
"(",
"self",
",",
"key",
",",
"kw",
")",
":",
"if",
"key",
"not",
"in",
"self",
":",
"obj",
"=",
"hdf5",
".",
"LiteralAttrs",
"(",
")",
"else",
":",
"obj",
"=",
"self",
"[",
"key",
"]",
"vars",
"(",
"obj",
")",
".",
"update",
"... | Update the object associated to `key` with the `kw` dictionary;
works for LiteralAttrs objects and automatically flushes. | [
"Update",
"the",
"object",
"associated",
"to",
"key",
"with",
"the",
"kw",
"dictionary",
";",
"works",
"for",
"LiteralAttrs",
"objects",
"and",
"automatically",
"flushes",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/datastore.py#L303-L314 | train | 214,163 |
def export_path(self, relname, export_dir=None):
    """
    Return the path of the exported file by adding the export_dir in
    front, the calculation ID at the end.

    :param relname: relative file name
    :param export_dir: export directory (if None use .export_dir)
    """
    if export_dir is None:
        export_dir = self.export_dir
    # flatten inner slashes to avoid creating intermediate directories
    flat = relname.replace('/', '-')
    base, ext = flat.rsplit('.', 1)
    newname = '%s_%s.%s' % (base, self.calc_id, ext)
    return os.path.join(export_dir, newname)
"def",
"export_path",
"(",
"self",
",",
"relname",
",",
"export_dir",
"=",
"None",
")",
":",
"# removing inner slashed to avoid creating intermediate directories",
"name",
",",
"ext",
"=",
"relname",
".",
"replace",
"(",
"'/'",
",",
"'-'",
")",
".",
"rsplit",
"(... | Return the path of the exported file by adding the export_dir in
front, the calculation ID at the end.
:param relname: relative file name
:param export_dir: export directory (if None use .export_dir) | [
"Return",
"the",
"path",
"of",
"the",
"exported",
"file",
"by",
"adding",
"the",
"export_dir",
"in",
"front",
"the",
"calculation",
"ID",
"at",
"the",
"end",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/datastore.py#L316-L329 | train | 214,164 |
def build_fname(self, prefix, postfix, fmt, export_dir=None):
    """
    Build a file name from a realization, by using prefix and extension.

    :param prefix: the prefix to use
    :param postfix: the postfix to use (can be a realization object)
    :param fmt: the extension ('csv', 'xml', etc)
    :param export_dir: export directory (if None use .export_dir)
    :returns: relative pathname including the extension
    """
    if hasattr(postfix, 'sm_lt_path'):
        # postfix is a realization object: embed its ordinal number
        fname = '%s-rlz-%03d.%s' % (prefix, postfix.ordinal, fmt)
    elif postfix:
        fname = '%s-%s.%s' % (prefix, postfix, fmt)
    else:
        fname = '%s.%s' % (prefix, fmt)
    return self.export_path(fname, export_dir)
"def",
"build_fname",
"(",
"self",
",",
"prefix",
",",
"postfix",
",",
"fmt",
",",
"export_dir",
"=",
"None",
")",
":",
"if",
"hasattr",
"(",
"postfix",
",",
"'sm_lt_path'",
")",
":",
"# is a realization",
"fname",
"=",
"'%s-rlz-%03d.%s'",
"%",
"(",
"prefi... | Build a file name from a realization, by using prefix and extension.
:param prefix: the prefix to use
:param postfix: the postfix to use (can be a realization object)
:param fmt: the extension ('csv', 'xml', etc)
:param export_dir: export directory (if None use .export_dir)
:returns: relative pathname including the extension | [
"Build",
"a",
"file",
"name",
"from",
"a",
"realization",
"by",
"using",
"prefix",
"and",
"extension",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/datastore.py#L331-L345 | train | 214,165 |
def flush(self):
    """Flush the underlying hdf5 file (and the parent's, if any)"""
    if self.parent != ():
        # propagate the flush to the parent datastore first
        self.parent.flush()
    if self.hdf5:  # the file is open
        self.hdf5.flush()
"def",
"flush",
"(",
"self",
")",
":",
"if",
"self",
".",
"parent",
"!=",
"(",
")",
":",
"self",
".",
"parent",
".",
"flush",
"(",
")",
"if",
"self",
".",
"hdf5",
":",
"# is open",
"self",
".",
"hdf5",
".",
"flush",
"(",
")"
] | Flush the underlying hdf5 file | [
"Flush",
"the",
"underlying",
"hdf5",
"file"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/datastore.py#L347-L352 | train | 214,166 |
def close(self):
    """Close the underlying hdf5 file (and flush/close the parent)"""
    if self.parent != ():
        # the parent is flushed and closed before this datastore
        self.parent.flush()
        self.parent.close()
    if self.hdf5:  # the file is open
        self.hdf5.flush()
        self.hdf5.close()
        # mark this datastore as closed
        self.hdf5 = ()
"def",
"close",
"(",
"self",
")",
":",
"if",
"self",
".",
"parent",
"!=",
"(",
")",
":",
"self",
".",
"parent",
".",
"flush",
"(",
")",
"self",
".",
"parent",
".",
"close",
"(",
")",
"if",
"self",
".",
"hdf5",
":",
"# is open",
"self",
".",
"hd... | Close the underlying hdf5 file | [
"Close",
"the",
"underlying",
"hdf5",
"file"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/datastore.py#L354-L362 | train | 214,167 |
gem/oq-engine | openquake/baselib/datastore.py | DataStore.getsize | def getsize(self, key=None):
"""
Return the size in byte of the output associated to the given key.
If no key is given, returns the total size of all files.
"""
if key is None:
return os.path.getsize(self.filename)
return hdf5.ByteCounter.get_nbytes(
h5py.File.__getitem__(self.hdf5, key)) | python | def getsize(self, key=None):
"""
Return the size in byte of the output associated to the given key.
If no key is given, returns the total size of all files.
"""
if key is None:
return os.path.getsize(self.filename)
return hdf5.ByteCounter.get_nbytes(
h5py.File.__getitem__(self.hdf5, key)) | [
"def",
"getsize",
"(",
"self",
",",
"key",
"=",
"None",
")",
":",
"if",
"key",
"is",
"None",
":",
"return",
"os",
".",
"path",
".",
"getsize",
"(",
"self",
".",
"filename",
")",
"return",
"hdf5",
".",
"ByteCounter",
".",
"get_nbytes",
"(",
"h5py",
... | Return the size in byte of the output associated to the given key.
If no key is given, returns the total size of all files. | [
"Return",
"the",
"size",
"in",
"byte",
"of",
"the",
"output",
"associated",
"to",
"the",
"given",
"key",
".",
"If",
"no",
"key",
"is",
"given",
"returns",
"the",
"total",
"size",
"of",
"all",
"files",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/datastore.py#L369-L377 | train | 214,168 |
gem/oq-engine | openquake/baselib/hdf5.py | maybe_encode | def maybe_encode(value):
"""
If value is a sequence of strings, encode it
"""
if isinstance(value, (list, tuple)) and isinstance(value[0], str):
return encode(value)
return value | python | def maybe_encode(value):
"""
If value is a sequence of strings, encode it
"""
if isinstance(value, (list, tuple)) and isinstance(value[0], str):
return encode(value)
return value | [
"def",
"maybe_encode",
"(",
"value",
")",
":",
"if",
"isinstance",
"(",
"value",
",",
"(",
"list",
",",
"tuple",
")",
")",
"and",
"isinstance",
"(",
"value",
"[",
"0",
"]",
",",
"str",
")",
":",
"return",
"encode",
"(",
"value",
")",
"return",
"val... | If value is a sequence of strings, encode it | [
"If",
"value",
"is",
"a",
"sequence",
"of",
"strings",
"encode",
"it"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/hdf5.py#L44-L50 | train | 214,169 |
gem/oq-engine | openquake/baselib/hdf5.py | extend | def extend(dset, array, **attrs):
"""
Extend an extensible dataset with an array of a compatible dtype.
:param dset: an h5py dataset
:param array: an array of length L
:returns: the total length of the dataset (i.e. initial length + L)
"""
length = len(dset)
if len(array) == 0:
return length
newlength = length + len(array)
if array.dtype.name == 'object': # vlen array
shape = (newlength,) + preshape(array[0])
else:
shape = (newlength,) + array.shape[1:]
dset.resize(shape)
dset[length:newlength] = array
for key, val in attrs.items():
dset.attrs[key] = val
return newlength | python | def extend(dset, array, **attrs):
"""
Extend an extensible dataset with an array of a compatible dtype.
:param dset: an h5py dataset
:param array: an array of length L
:returns: the total length of the dataset (i.e. initial length + L)
"""
length = len(dset)
if len(array) == 0:
return length
newlength = length + len(array)
if array.dtype.name == 'object': # vlen array
shape = (newlength,) + preshape(array[0])
else:
shape = (newlength,) + array.shape[1:]
dset.resize(shape)
dset[length:newlength] = array
for key, val in attrs.items():
dset.attrs[key] = val
return newlength | [
"def",
"extend",
"(",
"dset",
",",
"array",
",",
"*",
"*",
"attrs",
")",
":",
"length",
"=",
"len",
"(",
"dset",
")",
"if",
"len",
"(",
"array",
")",
"==",
"0",
":",
"return",
"length",
"newlength",
"=",
"length",
"+",
"len",
"(",
"array",
")",
... | Extend an extensible dataset with an array of a compatible dtype.
:param dset: an h5py dataset
:param array: an array of length L
:returns: the total length of the dataset (i.e. initial length + L) | [
"Extend",
"an",
"extensible",
"dataset",
"with",
"an",
"array",
"of",
"a",
"compatible",
"dtype",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/hdf5.py#L86-L106 | train | 214,170 |
gem/oq-engine | openquake/baselib/hdf5.py | extend3 | def extend3(filename, key, array, **attrs):
"""
Extend an HDF5 file dataset with the given array
"""
with h5py.File(filename) as h5:
try:
dset = h5[key]
except KeyError:
if array.dtype.name == 'object': # vlen array
shape = (None,) + preshape(array[0])
else:
shape = (None,) + array.shape[1:]
dset = create(h5, key, array.dtype, shape)
length = extend(dset, array)
for key, val in attrs.items():
dset.attrs[key] = val
h5.flush()
return length | python | def extend3(filename, key, array, **attrs):
"""
Extend an HDF5 file dataset with the given array
"""
with h5py.File(filename) as h5:
try:
dset = h5[key]
except KeyError:
if array.dtype.name == 'object': # vlen array
shape = (None,) + preshape(array[0])
else:
shape = (None,) + array.shape[1:]
dset = create(h5, key, array.dtype, shape)
length = extend(dset, array)
for key, val in attrs.items():
dset.attrs[key] = val
h5.flush()
return length | [
"def",
"extend3",
"(",
"filename",
",",
"key",
",",
"array",
",",
"*",
"*",
"attrs",
")",
":",
"with",
"h5py",
".",
"File",
"(",
"filename",
")",
"as",
"h5",
":",
"try",
":",
"dset",
"=",
"h5",
"[",
"key",
"]",
"except",
"KeyError",
":",
"if",
... | Extend an HDF5 file dataset with the given array | [
"Extend",
"an",
"HDF5",
"file",
"dataset",
"with",
"the",
"given",
"array"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/hdf5.py#L109-L126 | train | 214,171 |
gem/oq-engine | openquake/baselib/hdf5.py | get_nbytes | def get_nbytes(dset):
"""
If the dataset has an attribute 'nbytes', return it. Otherwise get the size
of the underlying array. Returns None if the dataset is actually a group.
"""
if 'nbytes' in dset.attrs:
# look if the dataset has an attribute nbytes
return dset.attrs['nbytes']
elif hasattr(dset, 'dtype'):
# else extract nbytes from the underlying array
return dset.size * numpy.zeros(1, dset.dtype).nbytes | python | def get_nbytes(dset):
"""
If the dataset has an attribute 'nbytes', return it. Otherwise get the size
of the underlying array. Returns None if the dataset is actually a group.
"""
if 'nbytes' in dset.attrs:
# look if the dataset has an attribute nbytes
return dset.attrs['nbytes']
elif hasattr(dset, 'dtype'):
# else extract nbytes from the underlying array
return dset.size * numpy.zeros(1, dset.dtype).nbytes | [
"def",
"get_nbytes",
"(",
"dset",
")",
":",
"if",
"'nbytes'",
"in",
"dset",
".",
"attrs",
":",
"# look if the dataset has an attribute nbytes",
"return",
"dset",
".",
"attrs",
"[",
"'nbytes'",
"]",
"elif",
"hasattr",
"(",
"dset",
",",
"'dtype'",
")",
":",
"#... | If the dataset has an attribute 'nbytes', return it. Otherwise get the size
of the underlying array. Returns None if the dataset is actually a group. | [
"If",
"the",
"dataset",
"has",
"an",
"attribute",
"nbytes",
"return",
"it",
".",
"Otherwise",
"get",
"the",
"size",
"of",
"the",
"underlying",
"array",
".",
"Returns",
"None",
"if",
"the",
"dataset",
"is",
"actually",
"a",
"group",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/hdf5.py#L206-L216 | train | 214,172 |
gem/oq-engine | openquake/baselib/hdf5.py | decode_array | def decode_array(values):
"""
Decode the values which are bytestrings.
"""
out = []
for val in values:
try:
out.append(val.decode('utf8'))
except AttributeError:
out.append(val)
return out | python | def decode_array(values):
"""
Decode the values which are bytestrings.
"""
out = []
for val in values:
try:
out.append(val.decode('utf8'))
except AttributeError:
out.append(val)
return out | [
"def",
"decode_array",
"(",
"values",
")",
":",
"out",
"=",
"[",
"]",
"for",
"val",
"in",
"values",
":",
"try",
":",
"out",
".",
"append",
"(",
"val",
".",
"decode",
"(",
"'utf8'",
")",
")",
"except",
"AttributeError",
":",
"out",
".",
"append",
"(... | Decode the values which are bytestrings. | [
"Decode",
"the",
"values",
"which",
"are",
"bytestrings",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/hdf5.py#L585-L595 | train | 214,173 |
gem/oq-engine | openquake/baselib/hdf5.py | File.temporary | def temporary(cls):
"""
Returns a temporary hdf5 file, open for writing.
The temporary name is stored in the .path attribute.
It is the user responsability to remove the file when closed.
"""
fh, path = tempfile.mkstemp(suffix='.hdf5')
os.close(fh)
self = cls(path, 'w')
self.path = path
return self | python | def temporary(cls):
"""
Returns a temporary hdf5 file, open for writing.
The temporary name is stored in the .path attribute.
It is the user responsability to remove the file when closed.
"""
fh, path = tempfile.mkstemp(suffix='.hdf5')
os.close(fh)
self = cls(path, 'w')
self.path = path
return self | [
"def",
"temporary",
"(",
"cls",
")",
":",
"fh",
",",
"path",
"=",
"tempfile",
".",
"mkstemp",
"(",
"suffix",
"=",
"'.hdf5'",
")",
"os",
".",
"close",
"(",
"fh",
")",
"self",
"=",
"cls",
"(",
"path",
",",
"'w'",
")",
"self",
".",
"path",
"=",
"p... | Returns a temporary hdf5 file, open for writing.
The temporary name is stored in the .path attribute.
It is the user responsability to remove the file when closed. | [
"Returns",
"a",
"temporary",
"hdf5",
"file",
"open",
"for",
"writing",
".",
"The",
"temporary",
"name",
"is",
"stored",
"in",
"the",
".",
"path",
"attribute",
".",
"It",
"is",
"the",
"user",
"responsability",
"to",
"remove",
"the",
"file",
"when",
"closed"... | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/hdf5.py#L280-L290 | train | 214,174 |
gem/oq-engine | openquake/baselib/hdf5.py | File.save_vlen | def save_vlen(self, key, data):
"""
Save a sequence of variable-length arrays
:param key: name of the dataset
:param data: data to store as a list of arrays
"""
shape = (None,) + data[0].shape[:-1]
try:
dset = self[key]
except KeyError:
vdt = h5py.special_dtype(vlen=data[0].dtype)
dset = create(self, key, vdt, shape, fillvalue=None)
nbytes = dset.attrs.get('nbytes', 0)
totlen = dset.attrs.get('totlen', 0)
for i, val in enumerate(data):
nbytes += val.nbytes
totlen += len(val)
length = len(dset)
dset.resize((length + len(data),) + shape[1:])
for i, arr in enumerate(data):
dset[length + i] = arr
dset.attrs['nbytes'] = nbytes
dset.attrs['totlen'] = totlen | python | def save_vlen(self, key, data):
"""
Save a sequence of variable-length arrays
:param key: name of the dataset
:param data: data to store as a list of arrays
"""
shape = (None,) + data[0].shape[:-1]
try:
dset = self[key]
except KeyError:
vdt = h5py.special_dtype(vlen=data[0].dtype)
dset = create(self, key, vdt, shape, fillvalue=None)
nbytes = dset.attrs.get('nbytes', 0)
totlen = dset.attrs.get('totlen', 0)
for i, val in enumerate(data):
nbytes += val.nbytes
totlen += len(val)
length = len(dset)
dset.resize((length + len(data),) + shape[1:])
for i, arr in enumerate(data):
dset[length + i] = arr
dset.attrs['nbytes'] = nbytes
dset.attrs['totlen'] = totlen | [
"def",
"save_vlen",
"(",
"self",
",",
"key",
",",
"data",
")",
":",
"shape",
"=",
"(",
"None",
",",
")",
"+",
"data",
"[",
"0",
"]",
".",
"shape",
"[",
":",
"-",
"1",
"]",
"try",
":",
"dset",
"=",
"self",
"[",
"key",
"]",
"except",
"KeyError"... | Save a sequence of variable-length arrays
:param key: name of the dataset
:param data: data to store as a list of arrays | [
"Save",
"a",
"sequence",
"of",
"variable",
"-",
"length",
"arrays"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/hdf5.py#L292-L315 | train | 214,175 |
gem/oq-engine | openquake/baselib/hdf5.py | File.set_nbytes | def set_nbytes(self, key, nbytes=None):
"""
Set the `nbytes` attribute on the HDF5 object identified by `key`.
"""
obj = super().__getitem__(key)
if nbytes is not None: # size set from outside
obj.attrs['nbytes'] = nbytes
else: # recursively determine the size of the datagroup
obj.attrs['nbytes'] = nbytes = ByteCounter.get_nbytes(obj)
return nbytes | python | def set_nbytes(self, key, nbytes=None):
"""
Set the `nbytes` attribute on the HDF5 object identified by `key`.
"""
obj = super().__getitem__(key)
if nbytes is not None: # size set from outside
obj.attrs['nbytes'] = nbytes
else: # recursively determine the size of the datagroup
obj.attrs['nbytes'] = nbytes = ByteCounter.get_nbytes(obj)
return nbytes | [
"def",
"set_nbytes",
"(",
"self",
",",
"key",
",",
"nbytes",
"=",
"None",
")",
":",
"obj",
"=",
"super",
"(",
")",
".",
"__getitem__",
"(",
"key",
")",
"if",
"nbytes",
"is",
"not",
"None",
":",
"# size set from outside",
"obj",
".",
"attrs",
"[",
"'n... | Set the `nbytes` attribute on the HDF5 object identified by `key`. | [
"Set",
"the",
"nbytes",
"attribute",
"on",
"the",
"HDF5",
"object",
"identified",
"by",
"key",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/hdf5.py#L376-L385 | train | 214,176 |
gem/oq-engine | openquake/hazardlib/gsim/can15/eastern.py | EasternCan15Mid._get_delta | def _get_delta(self, stds, dists):
"""
Computes the additional delta to be used for the computation of the
upp and low models
"""
delta = np.maximum((0.1-0.001*dists.repi), np.zeros_like(dists.repi))
return delta | python | def _get_delta(self, stds, dists):
"""
Computes the additional delta to be used for the computation of the
upp and low models
"""
delta = np.maximum((0.1-0.001*dists.repi), np.zeros_like(dists.repi))
return delta | [
"def",
"_get_delta",
"(",
"self",
",",
"stds",
",",
"dists",
")",
":",
"delta",
"=",
"np",
".",
"maximum",
"(",
"(",
"0.1",
"-",
"0.001",
"*",
"dists",
".",
"repi",
")",
",",
"np",
".",
"zeros_like",
"(",
"dists",
".",
"repi",
")",
")",
"return",... | Computes the additional delta to be used for the computation of the
upp and low models | [
"Computes",
"the",
"additional",
"delta",
"to",
"be",
"used",
"for",
"the",
"computation",
"of",
"the",
"upp",
"and",
"low",
"models"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/can15/eastern.py#L91-L97 | train | 214,177 |
gem/oq-engine | openquake/hazardlib/gsim/can15/eastern.py | EasternCan15Mid._get_mean_and_stddevs | def _get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types):
"""
Returns only the mean values.
See documentation for method `GroundShakingIntensityModel` in
:class:~`openquake.hazardlib.gsim.base.GSIM`
"""
# distances
distsl = copy.copy(dists)
distsl.rjb, distsl.rrup = \
utils.get_equivalent_distances_east(rup.mag, dists.repi)
#
# Pezeshk et al. 2011 - Rrup
mean1, stds1 = super().get_mean_and_stddevs(sites, rup, distsl, imt,
stddev_types)
mean1 = self.apply_correction_to_BC(mean1, imt, distsl)
#
# Atkinson 2008 - Rjb
gmpe = Atkinson2008prime()
mean2, stds2 = gmpe.get_mean_and_stddevs(sites, rup, distsl, imt,
stddev_types)
#
# Silva et al. 2002 - Rjb
gmpe = SilvaEtAl2002SingleCornerSaturation()
mean4, stds4 = gmpe.get_mean_and_stddevs(sites, rup, distsl, imt,
stddev_types)
mean4 = self.apply_correction_to_BC(mean4, imt, distsl)
#
# Silva et al. 2002 - Rjb
gmpe = SilvaEtAl2002DoubleCornerSaturation()
mean5, stds5 = gmpe.get_mean_and_stddevs(sites, rup, distsl, imt,
stddev_types)
mean5 = self.apply_correction_to_BC(mean5, imt, distsl)
#
# distances
distsl.rjb, distsl.rrup = \
utils.get_equivalent_distances_east(rup.mag, dists.repi, ab06=True)
#
# Atkinson and Boore 2006 - Rrup
gmpe = AtkinsonBoore2006Modified2011()
mean3, stds3 = gmpe.get_mean_and_stddevs(sites, rup, distsl, imt,
stddev_types)
# Computing adjusted mean and stds
mean_adj = mean1*0.2 + mean2*0.2 + mean3*0.2 + mean4*0.2 + mean5*0.2
# Note that in this case we do not apply a triangular smoothing on
# distance as explained at page 996 of Atkinson and Adams (2013)
# for the calculation of the standard deviation
stds_adj = np.log(np.exp(stds1)*0.2 + np.exp(stds2)*0.2 +
np.exp(stds3)*0.2 + np.exp(stds4)*0.2 +
np.exp(stds5)*0.2)
#
return mean_adj, stds_adj | python | def _get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types):
"""
Returns only the mean values.
See documentation for method `GroundShakingIntensityModel` in
:class:~`openquake.hazardlib.gsim.base.GSIM`
"""
# distances
distsl = copy.copy(dists)
distsl.rjb, distsl.rrup = \
utils.get_equivalent_distances_east(rup.mag, dists.repi)
#
# Pezeshk et al. 2011 - Rrup
mean1, stds1 = super().get_mean_and_stddevs(sites, rup, distsl, imt,
stddev_types)
mean1 = self.apply_correction_to_BC(mean1, imt, distsl)
#
# Atkinson 2008 - Rjb
gmpe = Atkinson2008prime()
mean2, stds2 = gmpe.get_mean_and_stddevs(sites, rup, distsl, imt,
stddev_types)
#
# Silva et al. 2002 - Rjb
gmpe = SilvaEtAl2002SingleCornerSaturation()
mean4, stds4 = gmpe.get_mean_and_stddevs(sites, rup, distsl, imt,
stddev_types)
mean4 = self.apply_correction_to_BC(mean4, imt, distsl)
#
# Silva et al. 2002 - Rjb
gmpe = SilvaEtAl2002DoubleCornerSaturation()
mean5, stds5 = gmpe.get_mean_and_stddevs(sites, rup, distsl, imt,
stddev_types)
mean5 = self.apply_correction_to_BC(mean5, imt, distsl)
#
# distances
distsl.rjb, distsl.rrup = \
utils.get_equivalent_distances_east(rup.mag, dists.repi, ab06=True)
#
# Atkinson and Boore 2006 - Rrup
gmpe = AtkinsonBoore2006Modified2011()
mean3, stds3 = gmpe.get_mean_and_stddevs(sites, rup, distsl, imt,
stddev_types)
# Computing adjusted mean and stds
mean_adj = mean1*0.2 + mean2*0.2 + mean3*0.2 + mean4*0.2 + mean5*0.2
# Note that in this case we do not apply a triangular smoothing on
# distance as explained at page 996 of Atkinson and Adams (2013)
# for the calculation of the standard deviation
stds_adj = np.log(np.exp(stds1)*0.2 + np.exp(stds2)*0.2 +
np.exp(stds3)*0.2 + np.exp(stds4)*0.2 +
np.exp(stds5)*0.2)
#
return mean_adj, stds_adj | [
"def",
"_get_mean_and_stddevs",
"(",
"self",
",",
"sites",
",",
"rup",
",",
"dists",
",",
"imt",
",",
"stddev_types",
")",
":",
"# distances",
"distsl",
"=",
"copy",
".",
"copy",
"(",
"dists",
")",
"distsl",
".",
"rjb",
",",
"distsl",
".",
"rrup",
"=",... | Returns only the mean values.
See documentation for method `GroundShakingIntensityModel` in
:class:~`openquake.hazardlib.gsim.base.GSIM` | [
"Returns",
"only",
"the",
"mean",
"values",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/can15/eastern.py#L99-L151 | train | 214,178 |
aaugustin/websockets | src/websockets/protocol.py | WebSocketCommonProtocol.ensure_open | async def ensure_open(self) -> None:
"""
Check that the WebSocket connection is open.
Raise :exc:`~websockets.exceptions.ConnectionClosed` if it isn't.
"""
# Handle cases from most common to least common for performance.
if self.state is State.OPEN:
# If self.transfer_data_task exited without a closing handshake,
# self.close_connection_task may be closing it, going straight
# from OPEN to CLOSED.
if self.transfer_data_task.done():
await asyncio.shield(self.close_connection_task)
raise ConnectionClosed(
self.close_code, self.close_reason
) from self.transfer_data_exc
else:
return
if self.state is State.CLOSED:
raise ConnectionClosed(
self.close_code, self.close_reason
) from self.transfer_data_exc
if self.state is State.CLOSING:
# If we started the closing handshake, wait for its completion to
# get the proper close code and status. self.close_connection_task
# will complete within 4 or 5 * close_timeout after close(). The
# CLOSING state also occurs when failing the connection. In that
# case self.close_connection_task will complete even faster.
await asyncio.shield(self.close_connection_task)
raise ConnectionClosed(
self.close_code, self.close_reason
) from self.transfer_data_exc
# Control may only reach this point in buggy third-party subclasses.
assert self.state is State.CONNECTING
raise InvalidState("WebSocket connection isn't established yet") | python | async def ensure_open(self) -> None:
"""
Check that the WebSocket connection is open.
Raise :exc:`~websockets.exceptions.ConnectionClosed` if it isn't.
"""
# Handle cases from most common to least common for performance.
if self.state is State.OPEN:
# If self.transfer_data_task exited without a closing handshake,
# self.close_connection_task may be closing it, going straight
# from OPEN to CLOSED.
if self.transfer_data_task.done():
await asyncio.shield(self.close_connection_task)
raise ConnectionClosed(
self.close_code, self.close_reason
) from self.transfer_data_exc
else:
return
if self.state is State.CLOSED:
raise ConnectionClosed(
self.close_code, self.close_reason
) from self.transfer_data_exc
if self.state is State.CLOSING:
# If we started the closing handshake, wait for its completion to
# get the proper close code and status. self.close_connection_task
# will complete within 4 or 5 * close_timeout after close(). The
# CLOSING state also occurs when failing the connection. In that
# case self.close_connection_task will complete even faster.
await asyncio.shield(self.close_connection_task)
raise ConnectionClosed(
self.close_code, self.close_reason
) from self.transfer_data_exc
# Control may only reach this point in buggy third-party subclasses.
assert self.state is State.CONNECTING
raise InvalidState("WebSocket connection isn't established yet") | [
"async",
"def",
"ensure_open",
"(",
"self",
")",
"->",
"None",
":",
"# Handle cases from most common to least common for performance.",
"if",
"self",
".",
"state",
"is",
"State",
".",
"OPEN",
":",
"# If self.transfer_data_task exited without a closing handshake,",
"# self.clo... | Check that the WebSocket connection is open.
Raise :exc:`~websockets.exceptions.ConnectionClosed` if it isn't. | [
"Check",
"that",
"the",
"WebSocket",
"connection",
"is",
"open",
"."
] | 17b3f47549b6f752a1be07fa1ba3037cb59c7d56 | https://github.com/aaugustin/websockets/blob/17b3f47549b6f752a1be07fa1ba3037cb59c7d56/src/websockets/protocol.py#L662-L700 | train | 214,179 |
aaugustin/websockets | src/websockets/protocol.py | WebSocketCommonProtocol.transfer_data | async def transfer_data(self) -> None:
"""
Read incoming messages and put them in a queue.
This coroutine runs in a task until the closing handshake is started.
"""
try:
while True:
message = await self.read_message()
# Exit the loop when receiving a close frame.
if message is None:
break
# Wait until there's room in the queue (if necessary).
if self.max_queue is not None:
while len(self.messages) >= self.max_queue:
self._put_message_waiter = self.loop.create_future()
try:
await self._put_message_waiter
finally:
self._put_message_waiter = None
# Put the message in the queue.
self.messages.append(message)
# Notify recv().
if self._pop_message_waiter is not None:
self._pop_message_waiter.set_result(None)
self._pop_message_waiter = None
except asyncio.CancelledError as exc:
self.transfer_data_exc = exc
# If fail_connection() cancels this task, avoid logging the error
# twice and failing the connection again.
raise
except WebSocketProtocolError as exc:
self.transfer_data_exc = exc
self.fail_connection(1002)
except (ConnectionError, EOFError) as exc:
# Reading data with self.reader.readexactly may raise:
# - most subclasses of ConnectionError if the TCP connection
# breaks, is reset, or is aborted;
# - IncompleteReadError, a subclass of EOFError, if fewer
# bytes are available than requested.
self.transfer_data_exc = exc
self.fail_connection(1006)
except UnicodeDecodeError as exc:
self.transfer_data_exc = exc
self.fail_connection(1007)
except PayloadTooBig as exc:
self.transfer_data_exc = exc
self.fail_connection(1009)
except Exception as exc:
# This shouldn't happen often because exceptions expected under
# regular circumstances are handled above. If it does, consider
# catching and handling more exceptions.
logger.error("Error in data transfer", exc_info=True)
self.transfer_data_exc = exc
self.fail_connection(1011) | python | async def transfer_data(self) -> None:
"""
Read incoming messages and put them in a queue.
This coroutine runs in a task until the closing handshake is started.
"""
try:
while True:
message = await self.read_message()
# Exit the loop when receiving a close frame.
if message is None:
break
# Wait until there's room in the queue (if necessary).
if self.max_queue is not None:
while len(self.messages) >= self.max_queue:
self._put_message_waiter = self.loop.create_future()
try:
await self._put_message_waiter
finally:
self._put_message_waiter = None
# Put the message in the queue.
self.messages.append(message)
# Notify recv().
if self._pop_message_waiter is not None:
self._pop_message_waiter.set_result(None)
self._pop_message_waiter = None
except asyncio.CancelledError as exc:
self.transfer_data_exc = exc
# If fail_connection() cancels this task, avoid logging the error
# twice and failing the connection again.
raise
except WebSocketProtocolError as exc:
self.transfer_data_exc = exc
self.fail_connection(1002)
except (ConnectionError, EOFError) as exc:
# Reading data with self.reader.readexactly may raise:
# - most subclasses of ConnectionError if the TCP connection
# breaks, is reset, or is aborted;
# - IncompleteReadError, a subclass of EOFError, if fewer
# bytes are available than requested.
self.transfer_data_exc = exc
self.fail_connection(1006)
except UnicodeDecodeError as exc:
self.transfer_data_exc = exc
self.fail_connection(1007)
except PayloadTooBig as exc:
self.transfer_data_exc = exc
self.fail_connection(1009)
except Exception as exc:
# This shouldn't happen often because exceptions expected under
# regular circumstances are handled above. If it does, consider
# catching and handling more exceptions.
logger.error("Error in data transfer", exc_info=True)
self.transfer_data_exc = exc
self.fail_connection(1011) | [
"async",
"def",
"transfer_data",
"(",
"self",
")",
"->",
"None",
":",
"try",
":",
"while",
"True",
":",
"message",
"=",
"await",
"self",
".",
"read_message",
"(",
")",
"# Exit the loop when receiving a close frame.",
"if",
"message",
"is",
"None",
":",
"break"... | Read incoming messages and put them in a queue.
This coroutine runs in a task until the closing handshake is started. | [
"Read",
"incoming",
"messages",
"and",
"put",
"them",
"in",
"a",
"queue",
"."
] | 17b3f47549b6f752a1be07fa1ba3037cb59c7d56 | https://github.com/aaugustin/websockets/blob/17b3f47549b6f752a1be07fa1ba3037cb59c7d56/src/websockets/protocol.py#L702-L768 | train | 214,180 |
aaugustin/websockets | src/websockets/protocol.py | WebSocketCommonProtocol.read_message | async def read_message(self) -> Optional[Data]:
"""
Read a single message from the connection.
Re-assemble data frames if the message is fragmented.
Return ``None`` when the closing handshake is started.
"""
frame = await self.read_data_frame(max_size=self.max_size)
# A close frame was received.
if frame is None:
return None
if frame.opcode == OP_TEXT:
text = True
elif frame.opcode == OP_BINARY:
text = False
else: # frame.opcode == OP_CONT
raise WebSocketProtocolError("Unexpected opcode")
# Shortcut for the common case - no fragmentation
if frame.fin:
return frame.data.decode("utf-8") if text else frame.data
# 5.4. Fragmentation
chunks: List[Data] = []
max_size = self.max_size
if text:
decoder_factory = codecs.getincrementaldecoder("utf-8")
# https://github.com/python/typeshed/pull/2752
decoder = decoder_factory(errors="strict") # type: ignore
if max_size is None:
def append(frame: Frame) -> None:
nonlocal chunks
chunks.append(decoder.decode(frame.data, frame.fin))
else:
def append(frame: Frame) -> None:
nonlocal chunks, max_size
chunks.append(decoder.decode(frame.data, frame.fin))
max_size -= len(frame.data)
else:
if max_size is None:
def append(frame: Frame) -> None:
nonlocal chunks
chunks.append(frame.data)
else:
def append(frame: Frame) -> None:
nonlocal chunks, max_size
chunks.append(frame.data)
max_size -= len(frame.data)
append(frame)
while not frame.fin:
frame = await self.read_data_frame(max_size=max_size)
if frame is None:
raise WebSocketProtocolError("Incomplete fragmented message")
if frame.opcode != OP_CONT:
raise WebSocketProtocolError("Unexpected opcode")
append(frame)
# mypy cannot figure out that chunks have the proper type.
return ("" if text else b"").join(chunks) | python | async def read_message(self) -> Optional[Data]:
"""
Read a single message from the connection.
Re-assemble data frames if the message is fragmented.
Return ``None`` when the closing handshake is started.
"""
frame = await self.read_data_frame(max_size=self.max_size)
# A close frame was received.
if frame is None:
return None
if frame.opcode == OP_TEXT:
text = True
elif frame.opcode == OP_BINARY:
text = False
else: # frame.opcode == OP_CONT
raise WebSocketProtocolError("Unexpected opcode")
# Shortcut for the common case - no fragmentation
if frame.fin:
return frame.data.decode("utf-8") if text else frame.data
# 5.4. Fragmentation
chunks: List[Data] = []
max_size = self.max_size
if text:
decoder_factory = codecs.getincrementaldecoder("utf-8")
# https://github.com/python/typeshed/pull/2752
decoder = decoder_factory(errors="strict") # type: ignore
if max_size is None:
def append(frame: Frame) -> None:
nonlocal chunks
chunks.append(decoder.decode(frame.data, frame.fin))
else:
def append(frame: Frame) -> None:
nonlocal chunks, max_size
chunks.append(decoder.decode(frame.data, frame.fin))
max_size -= len(frame.data)
else:
if max_size is None:
def append(frame: Frame) -> None:
nonlocal chunks
chunks.append(frame.data)
else:
def append(frame: Frame) -> None:
nonlocal chunks, max_size
chunks.append(frame.data)
max_size -= len(frame.data)
append(frame)
while not frame.fin:
frame = await self.read_data_frame(max_size=max_size)
if frame is None:
raise WebSocketProtocolError("Incomplete fragmented message")
if frame.opcode != OP_CONT:
raise WebSocketProtocolError("Unexpected opcode")
append(frame)
# mypy cannot figure out that chunks have the proper type.
return ("" if text else b"").join(chunks) | [
"async",
"def",
"read_message",
"(",
"self",
")",
"->",
"Optional",
"[",
"Data",
"]",
":",
"frame",
"=",
"await",
"self",
".",
"read_data_frame",
"(",
"max_size",
"=",
"self",
".",
"max_size",
")",
"# A close frame was received.",
"if",
"frame",
"is",
"None"... | Read a single message from the connection.
Re-assemble data frames if the message is fragmented.
Return ``None`` when the closing handshake is started. | [
"Read",
"a",
"single",
"message",
"from",
"the",
"connection",
"."
] | 17b3f47549b6f752a1be07fa1ba3037cb59c7d56 | https://github.com/aaugustin/websockets/blob/17b3f47549b6f752a1be07fa1ba3037cb59c7d56/src/websockets/protocol.py#L770-L841 | train | 214,181 |
aaugustin/websockets | src/websockets/protocol.py | WebSocketCommonProtocol.read_data_frame | async def read_data_frame(self, max_size: int) -> Optional[Frame]:
"""
Read a single data frame from the connection.
Process control frames received before the next data frame.
Return ``None`` if a close frame is encountered before any data frame.
"""
# 6.2. Receiving Data
while True:
frame = await self.read_frame(max_size)
# 5.5. Control Frames
if frame.opcode == OP_CLOSE:
# 7.1.5. The WebSocket Connection Close Code
# 7.1.6. The WebSocket Connection Close Reason
self.close_code, self.close_reason = parse_close(frame.data)
# Echo the original data instead of re-serializing it with
# serialize_close() because that fails when the close frame is
# empty and parse_close() synthetizes a 1005 close code.
await self.write_close_frame(frame.data)
return None
elif frame.opcode == OP_PING:
# Answer pings.
ping_hex = frame.data.hex() or "[empty]"
logger.debug(
"%s - received ping, sending pong: %s", self.side, ping_hex
)
await self.pong(frame.data)
elif frame.opcode == OP_PONG:
# Acknowledge pings on solicited pongs.
if frame.data in self.pings:
# Acknowledge all pings up to the one matching this pong.
ping_id = None
ping_ids = []
while ping_id != frame.data:
ping_id, pong_waiter = self.pings.popitem(last=False)
ping_ids.append(ping_id)
pong_waiter.set_result(None)
pong_hex = binascii.hexlify(frame.data).decode() or "[empty]"
logger.debug(
"%s - received solicited pong: %s", self.side, pong_hex
)
ping_ids = ping_ids[:-1]
if ping_ids:
pings_hex = ", ".join(
binascii.hexlify(ping_id).decode() or "[empty]"
for ping_id in ping_ids
)
plural = "s" if len(ping_ids) > 1 else ""
logger.debug(
"%s - acknowledged previous ping%s: %s",
self.side,
plural,
pings_hex,
)
else:
pong_hex = binascii.hexlify(frame.data).decode() or "[empty]"
logger.debug(
"%s - received unsolicited pong: %s", self.side, pong_hex
)
# 5.6. Data Frames
else:
return frame | python | async def read_data_frame(self, max_size: int) -> Optional[Frame]:
"""
Read a single data frame from the connection.
Process control frames received before the next data frame.
Return ``None`` if a close frame is encountered before any data frame.
"""
# 6.2. Receiving Data
while True:
frame = await self.read_frame(max_size)
# 5.5. Control Frames
if frame.opcode == OP_CLOSE:
# 7.1.5. The WebSocket Connection Close Code
# 7.1.6. The WebSocket Connection Close Reason
self.close_code, self.close_reason = parse_close(frame.data)
# Echo the original data instead of re-serializing it with
# serialize_close() because that fails when the close frame is
# empty and parse_close() synthetizes a 1005 close code.
await self.write_close_frame(frame.data)
return None
elif frame.opcode == OP_PING:
# Answer pings.
ping_hex = frame.data.hex() or "[empty]"
logger.debug(
"%s - received ping, sending pong: %s", self.side, ping_hex
)
await self.pong(frame.data)
elif frame.opcode == OP_PONG:
# Acknowledge pings on solicited pongs.
if frame.data in self.pings:
# Acknowledge all pings up to the one matching this pong.
ping_id = None
ping_ids = []
while ping_id != frame.data:
ping_id, pong_waiter = self.pings.popitem(last=False)
ping_ids.append(ping_id)
pong_waiter.set_result(None)
pong_hex = binascii.hexlify(frame.data).decode() or "[empty]"
logger.debug(
"%s - received solicited pong: %s", self.side, pong_hex
)
ping_ids = ping_ids[:-1]
if ping_ids:
pings_hex = ", ".join(
binascii.hexlify(ping_id).decode() or "[empty]"
for ping_id in ping_ids
)
plural = "s" if len(ping_ids) > 1 else ""
logger.debug(
"%s - acknowledged previous ping%s: %s",
self.side,
plural,
pings_hex,
)
else:
pong_hex = binascii.hexlify(frame.data).decode() or "[empty]"
logger.debug(
"%s - received unsolicited pong: %s", self.side, pong_hex
)
# 5.6. Data Frames
else:
return frame | [
"async",
"def",
"read_data_frame",
"(",
"self",
",",
"max_size",
":",
"int",
")",
"->",
"Optional",
"[",
"Frame",
"]",
":",
"# 6.2. Receiving Data",
"while",
"True",
":",
"frame",
"=",
"await",
"self",
".",
"read_frame",
"(",
"max_size",
")",
"# 5.5. Control... | Read a single data frame from the connection.
Process control frames received before the next data frame.
Return ``None`` if a close frame is encountered before any data frame. | [
"Read",
"a",
"single",
"data",
"frame",
"from",
"the",
"connection",
"."
] | 17b3f47549b6f752a1be07fa1ba3037cb59c7d56 | https://github.com/aaugustin/websockets/blob/17b3f47549b6f752a1be07fa1ba3037cb59c7d56/src/websockets/protocol.py#L843-L910 | train | 214,182 |
aaugustin/websockets | src/websockets/protocol.py | WebSocketCommonProtocol.read_frame | async def read_frame(self, max_size: int) -> Frame:
"""
Read a single frame from the connection.
"""
frame = await Frame.read(
self.reader.readexactly,
mask=not self.is_client,
max_size=max_size,
extensions=self.extensions,
)
logger.debug("%s < %r", self.side, frame)
return frame | python | async def read_frame(self, max_size: int) -> Frame:
"""
Read a single frame from the connection.
"""
frame = await Frame.read(
self.reader.readexactly,
mask=not self.is_client,
max_size=max_size,
extensions=self.extensions,
)
logger.debug("%s < %r", self.side, frame)
return frame | [
"async",
"def",
"read_frame",
"(",
"self",
",",
"max_size",
":",
"int",
")",
"->",
"Frame",
":",
"frame",
"=",
"await",
"Frame",
".",
"read",
"(",
"self",
".",
"reader",
".",
"readexactly",
",",
"mask",
"=",
"not",
"self",
".",
"is_client",
",",
"max... | Read a single frame from the connection. | [
"Read",
"a",
"single",
"frame",
"from",
"the",
"connection",
"."
] | 17b3f47549b6f752a1be07fa1ba3037cb59c7d56 | https://github.com/aaugustin/websockets/blob/17b3f47549b6f752a1be07fa1ba3037cb59c7d56/src/websockets/protocol.py#L912-L924 | train | 214,183 |
aaugustin/websockets | src/websockets/protocol.py | WebSocketCommonProtocol.write_close_frame | async def write_close_frame(self, data: bytes = b"") -> None:
"""
Write a close frame if and only if the connection state is OPEN.
This dedicated coroutine must be used for writing close frames to
ensure that at most one close frame is sent on a given connection.
"""
# Test and set the connection state before sending the close frame to
# avoid sending two frames in case of concurrent calls.
if self.state is State.OPEN:
# 7.1.3. The WebSocket Closing Handshake is Started
self.state = State.CLOSING
logger.debug("%s - state = CLOSING", self.side)
# 7.1.2. Start the WebSocket Closing Handshake
await self.write_frame(True, OP_CLOSE, data, _expected_state=State.CLOSING) | python | async def write_close_frame(self, data: bytes = b"") -> None:
"""
Write a close frame if and only if the connection state is OPEN.
This dedicated coroutine must be used for writing close frames to
ensure that at most one close frame is sent on a given connection.
"""
# Test and set the connection state before sending the close frame to
# avoid sending two frames in case of concurrent calls.
if self.state is State.OPEN:
# 7.1.3. The WebSocket Closing Handshake is Started
self.state = State.CLOSING
logger.debug("%s - state = CLOSING", self.side)
# 7.1.2. Start the WebSocket Closing Handshake
await self.write_frame(True, OP_CLOSE, data, _expected_state=State.CLOSING) | [
"async",
"def",
"write_close_frame",
"(",
"self",
",",
"data",
":",
"bytes",
"=",
"b\"\"",
")",
"->",
"None",
":",
"# Test and set the connection state before sending the close frame to",
"# avoid sending two frames in case of concurrent calls.",
"if",
"self",
".",
"state",
... | Write a close frame if and only if the connection state is OPEN.
This dedicated coroutine must be used for writing close frames to
ensure that at most one close frame is sent on a given connection. | [
"Write",
"a",
"close",
"frame",
"if",
"and",
"only",
"if",
"the",
"connection",
"state",
"is",
"OPEN",
"."
] | 17b3f47549b6f752a1be07fa1ba3037cb59c7d56 | https://github.com/aaugustin/websockets/blob/17b3f47549b6f752a1be07fa1ba3037cb59c7d56/src/websockets/protocol.py#L953-L969 | train | 214,184 |
aaugustin/websockets | src/websockets/protocol.py | WebSocketCommonProtocol.keepalive_ping | async def keepalive_ping(self) -> None:
"""
Send a Ping frame and wait for a Pong frame at regular intervals.
This coroutine exits when the connection terminates and one of the
following happens:
- :meth:`ping` raises :exc:`ConnectionClosed`, or
- :meth:`close_connection` cancels :attr:`keepalive_ping_task`.
"""
if self.ping_interval is None:
return
try:
while True:
await asyncio.sleep(self.ping_interval, loop=self.loop)
# ping() cannot raise ConnectionClosed, only CancelledError:
# - If the connection is CLOSING, keepalive_ping_task will be
# canceled by close_connection() before ping() returns.
# - If the connection is CLOSED, keepalive_ping_task must be
# canceled already.
ping_waiter = await self.ping()
if self.ping_timeout is not None:
try:
await asyncio.wait_for(
ping_waiter, self.ping_timeout, loop=self.loop
)
except asyncio.TimeoutError:
logger.debug("%s ! timed out waiting for pong", self.side)
self.fail_connection(1011)
break
except asyncio.CancelledError:
raise
except Exception:
logger.warning("Unexpected exception in keepalive ping task", exc_info=True) | python | async def keepalive_ping(self) -> None:
"""
Send a Ping frame and wait for a Pong frame at regular intervals.
This coroutine exits when the connection terminates and one of the
following happens:
- :meth:`ping` raises :exc:`ConnectionClosed`, or
- :meth:`close_connection` cancels :attr:`keepalive_ping_task`.
"""
if self.ping_interval is None:
return
try:
while True:
await asyncio.sleep(self.ping_interval, loop=self.loop)
# ping() cannot raise ConnectionClosed, only CancelledError:
# - If the connection is CLOSING, keepalive_ping_task will be
# canceled by close_connection() before ping() returns.
# - If the connection is CLOSED, keepalive_ping_task must be
# canceled already.
ping_waiter = await self.ping()
if self.ping_timeout is not None:
try:
await asyncio.wait_for(
ping_waiter, self.ping_timeout, loop=self.loop
)
except asyncio.TimeoutError:
logger.debug("%s ! timed out waiting for pong", self.side)
self.fail_connection(1011)
break
except asyncio.CancelledError:
raise
except Exception:
logger.warning("Unexpected exception in keepalive ping task", exc_info=True) | [
"async",
"def",
"keepalive_ping",
"(",
"self",
")",
"->",
"None",
":",
"if",
"self",
".",
"ping_interval",
"is",
"None",
":",
"return",
"try",
":",
"while",
"True",
":",
"await",
"asyncio",
".",
"sleep",
"(",
"self",
".",
"ping_interval",
",",
"loop",
... | Send a Ping frame and wait for a Pong frame at regular intervals.
This coroutine exits when the connection terminates and one of the
following happens:
- :meth:`ping` raises :exc:`ConnectionClosed`, or
- :meth:`close_connection` cancels :attr:`keepalive_ping_task`. | [
"Send",
"a",
"Ping",
"frame",
"and",
"wait",
"for",
"a",
"Pong",
"frame",
"at",
"regular",
"intervals",
"."
] | 17b3f47549b6f752a1be07fa1ba3037cb59c7d56 | https://github.com/aaugustin/websockets/blob/17b3f47549b6f752a1be07fa1ba3037cb59c7d56/src/websockets/protocol.py#L971-L1009 | train | 214,185 |
aaugustin/websockets | src/websockets/protocol.py | WebSocketCommonProtocol.close_connection | async def close_connection(self) -> None:
"""
7.1.1. Close the WebSocket Connection
When the opening handshake succeeds, :meth:`connection_open` starts
this coroutine in a task. It waits for the data transfer phase to
complete then it closes the TCP connection cleanly.
When the opening handshake fails, :meth:`fail_connection` does the
same. There's no data transfer phase in that case.
"""
try:
# Wait for the data transfer phase to complete.
if hasattr(self, "transfer_data_task"):
try:
await self.transfer_data_task
except asyncio.CancelledError:
pass
# Cancel the keepalive ping task.
if hasattr(self, "keepalive_ping_task"):
self.keepalive_ping_task.cancel()
# A client should wait for a TCP close from the server.
if self.is_client and hasattr(self, "transfer_data_task"):
if await self.wait_for_connection_lost():
return
logger.debug("%s ! timed out waiting for TCP close", self.side)
# Half-close the TCP connection if possible (when there's no TLS).
if self.writer.can_write_eof():
logger.debug("%s x half-closing TCP connection", self.side)
self.writer.write_eof()
if await self.wait_for_connection_lost():
return
logger.debug("%s ! timed out waiting for TCP close", self.side)
finally:
# The try/finally ensures that the transport never remains open,
# even if this coroutine is canceled (for example).
# If connection_lost() was called, the TCP connection is closed.
# However, if TLS is enabled, the transport still needs closing.
# Else asyncio complains: ResourceWarning: unclosed transport.
if self.connection_lost_waiter.done() and not self.secure:
return
# Close the TCP connection. Buffers are flushed asynchronously.
logger.debug("%s x closing TCP connection", self.side)
self.writer.close()
if await self.wait_for_connection_lost():
return
logger.debug("%s ! timed out waiting for TCP close", self.side)
# Abort the TCP connection. Buffers are discarded.
logger.debug("%s x aborting TCP connection", self.side)
# mypy thinks self.writer.transport is a BaseTransport, not a Transport.
self.writer.transport.abort() # type: ignore
# connection_lost() is called quickly after aborting.
await self.wait_for_connection_lost() | python | async def close_connection(self) -> None:
"""
7.1.1. Close the WebSocket Connection
When the opening handshake succeeds, :meth:`connection_open` starts
this coroutine in a task. It waits for the data transfer phase to
complete then it closes the TCP connection cleanly.
When the opening handshake fails, :meth:`fail_connection` does the
same. There's no data transfer phase in that case.
"""
try:
# Wait for the data transfer phase to complete.
if hasattr(self, "transfer_data_task"):
try:
await self.transfer_data_task
except asyncio.CancelledError:
pass
# Cancel the keepalive ping task.
if hasattr(self, "keepalive_ping_task"):
self.keepalive_ping_task.cancel()
# A client should wait for a TCP close from the server.
if self.is_client and hasattr(self, "transfer_data_task"):
if await self.wait_for_connection_lost():
return
logger.debug("%s ! timed out waiting for TCP close", self.side)
# Half-close the TCP connection if possible (when there's no TLS).
if self.writer.can_write_eof():
logger.debug("%s x half-closing TCP connection", self.side)
self.writer.write_eof()
if await self.wait_for_connection_lost():
return
logger.debug("%s ! timed out waiting for TCP close", self.side)
finally:
# The try/finally ensures that the transport never remains open,
# even if this coroutine is canceled (for example).
# If connection_lost() was called, the TCP connection is closed.
# However, if TLS is enabled, the transport still needs closing.
# Else asyncio complains: ResourceWarning: unclosed transport.
if self.connection_lost_waiter.done() and not self.secure:
return
# Close the TCP connection. Buffers are flushed asynchronously.
logger.debug("%s x closing TCP connection", self.side)
self.writer.close()
if await self.wait_for_connection_lost():
return
logger.debug("%s ! timed out waiting for TCP close", self.side)
# Abort the TCP connection. Buffers are discarded.
logger.debug("%s x aborting TCP connection", self.side)
# mypy thinks self.writer.transport is a BaseTransport, not a Transport.
self.writer.transport.abort() # type: ignore
# connection_lost() is called quickly after aborting.
await self.wait_for_connection_lost() | [
"async",
"def",
"close_connection",
"(",
"self",
")",
"->",
"None",
":",
"try",
":",
"# Wait for the data transfer phase to complete.",
"if",
"hasattr",
"(",
"self",
",",
"\"transfer_data_task\"",
")",
":",
"try",
":",
"await",
"self",
".",
"transfer_data_task",
"... | 7.1.1. Close the WebSocket Connection
When the opening handshake succeeds, :meth:`connection_open` starts
this coroutine in a task. It waits for the data transfer phase to
complete then it closes the TCP connection cleanly.
When the opening handshake fails, :meth:`fail_connection` does the
same. There's no data transfer phase in that case. | [
"7",
".",
"1",
".",
"1",
".",
"Close",
"the",
"WebSocket",
"Connection"
] | 17b3f47549b6f752a1be07fa1ba3037cb59c7d56 | https://github.com/aaugustin/websockets/blob/17b3f47549b6f752a1be07fa1ba3037cb59c7d56/src/websockets/protocol.py#L1011-L1074 | train | 214,186 |
aaugustin/websockets | src/websockets/protocol.py | WebSocketCommonProtocol.wait_for_connection_lost | async def wait_for_connection_lost(self) -> bool:
"""
Wait until the TCP connection is closed or ``self.close_timeout`` elapses.
Return ``True`` if the connection is closed and ``False`` otherwise.
"""
if not self.connection_lost_waiter.done():
try:
await asyncio.wait_for(
asyncio.shield(self.connection_lost_waiter),
self.close_timeout,
loop=self.loop,
)
except asyncio.TimeoutError:
pass
# Re-check self.connection_lost_waiter.done() synchronously because
# connection_lost() could run between the moment the timeout occurs
# and the moment this coroutine resumes running.
return self.connection_lost_waiter.done() | python | async def wait_for_connection_lost(self) -> bool:
"""
Wait until the TCP connection is closed or ``self.close_timeout`` elapses.
Return ``True`` if the connection is closed and ``False`` otherwise.
"""
if not self.connection_lost_waiter.done():
try:
await asyncio.wait_for(
asyncio.shield(self.connection_lost_waiter),
self.close_timeout,
loop=self.loop,
)
except asyncio.TimeoutError:
pass
# Re-check self.connection_lost_waiter.done() synchronously because
# connection_lost() could run between the moment the timeout occurs
# and the moment this coroutine resumes running.
return self.connection_lost_waiter.done() | [
"async",
"def",
"wait_for_connection_lost",
"(",
"self",
")",
"->",
"bool",
":",
"if",
"not",
"self",
".",
"connection_lost_waiter",
".",
"done",
"(",
")",
":",
"try",
":",
"await",
"asyncio",
".",
"wait_for",
"(",
"asyncio",
".",
"shield",
"(",
"self",
... | Wait until the TCP connection is closed or ``self.close_timeout`` elapses.
Return ``True`` if the connection is closed and ``False`` otherwise. | [
"Wait",
"until",
"the",
"TCP",
"connection",
"is",
"closed",
"or",
"self",
".",
"close_timeout",
"elapses",
"."
] | 17b3f47549b6f752a1be07fa1ba3037cb59c7d56 | https://github.com/aaugustin/websockets/blob/17b3f47549b6f752a1be07fa1ba3037cb59c7d56/src/websockets/protocol.py#L1076-L1095 | train | 214,187 |
aaugustin/websockets | src/websockets/protocol.py | WebSocketCommonProtocol.fail_connection | def fail_connection(self, code: int = 1006, reason: str = "") -> None:
"""
7.1.7. Fail the WebSocket Connection
This requires:
1. Stopping all processing of incoming data, which means cancelling
:attr:`transfer_data_task`. The close code will be 1006 unless a
close frame was received earlier.
2. Sending a close frame with an appropriate code if the opening
handshake succeeded and the other side is likely to process it.
3. Closing the connection. :meth:`close_connection` takes care of
this once :attr:`transfer_data_task` exits after being canceled.
(The specification describes these steps in the opposite order.)
"""
logger.debug(
"%s ! failing %s WebSocket connection with code %d",
self.side,
self.state.name,
code,
)
# Cancel transfer_data_task if the opening handshake succeeded.
# cancel() is idempotent and ignored if the task is done already.
if hasattr(self, "transfer_data_task"):
self.transfer_data_task.cancel()
# Send a close frame when the state is OPEN (a close frame was already
# sent if it's CLOSING), except when failing the connection because of
# an error reading from or writing to the network.
# Don't send a close frame if the connection is broken.
if code != 1006 and self.state is State.OPEN:
frame_data = serialize_close(code, reason)
# Write the close frame without draining the write buffer.
# Keeping fail_connection() synchronous guarantees it can't
# get stuck and simplifies the implementation of the callers.
# Not drainig the write buffer is acceptable in this context.
# This duplicates a few lines of code from write_close_frame()
# and write_frame().
self.state = State.CLOSING
logger.debug("%s - state = CLOSING", self.side)
frame = Frame(True, OP_CLOSE, frame_data)
logger.debug("%s > %r", self.side, frame)
frame.write(
self.writer.write, mask=self.is_client, extensions=self.extensions
)
# Start close_connection_task if the opening handshake didn't succeed.
if not hasattr(self, "close_connection_task"):
self.close_connection_task = self.loop.create_task(self.close_connection()) | python | def fail_connection(self, code: int = 1006, reason: str = "") -> None:
"""
7.1.7. Fail the WebSocket Connection
This requires:
1. Stopping all processing of incoming data, which means cancelling
:attr:`transfer_data_task`. The close code will be 1006 unless a
close frame was received earlier.
2. Sending a close frame with an appropriate code if the opening
handshake succeeded and the other side is likely to process it.
3. Closing the connection. :meth:`close_connection` takes care of
this once :attr:`transfer_data_task` exits after being canceled.
(The specification describes these steps in the opposite order.)
"""
logger.debug(
"%s ! failing %s WebSocket connection with code %d",
self.side,
self.state.name,
code,
)
# Cancel transfer_data_task if the opening handshake succeeded.
# cancel() is idempotent and ignored if the task is done already.
if hasattr(self, "transfer_data_task"):
self.transfer_data_task.cancel()
# Send a close frame when the state is OPEN (a close frame was already
# sent if it's CLOSING), except when failing the connection because of
# an error reading from or writing to the network.
# Don't send a close frame if the connection is broken.
if code != 1006 and self.state is State.OPEN:
frame_data = serialize_close(code, reason)
# Write the close frame without draining the write buffer.
# Keeping fail_connection() synchronous guarantees it can't
# get stuck and simplifies the implementation of the callers.
# Not drainig the write buffer is acceptable in this context.
# This duplicates a few lines of code from write_close_frame()
# and write_frame().
self.state = State.CLOSING
logger.debug("%s - state = CLOSING", self.side)
frame = Frame(True, OP_CLOSE, frame_data)
logger.debug("%s > %r", self.side, frame)
frame.write(
self.writer.write, mask=self.is_client, extensions=self.extensions
)
# Start close_connection_task if the opening handshake didn't succeed.
if not hasattr(self, "close_connection_task"):
self.close_connection_task = self.loop.create_task(self.close_connection()) | [
"def",
"fail_connection",
"(",
"self",
",",
"code",
":",
"int",
"=",
"1006",
",",
"reason",
":",
"str",
"=",
"\"\"",
")",
"->",
"None",
":",
"logger",
".",
"debug",
"(",
"\"%s ! failing %s WebSocket connection with code %d\"",
",",
"self",
".",
"side",
",",
... | 7.1.7. Fail the WebSocket Connection
This requires:
1. Stopping all processing of incoming data, which means cancelling
:attr:`transfer_data_task`. The close code will be 1006 unless a
close frame was received earlier.
2. Sending a close frame with an appropriate code if the opening
handshake succeeded and the other side is likely to process it.
3. Closing the connection. :meth:`close_connection` takes care of
this once :attr:`transfer_data_task` exits after being canceled.
(The specification describes these steps in the opposite order.) | [
"7",
".",
"1",
".",
"7",
".",
"Fail",
"the",
"WebSocket",
"Connection"
] | 17b3f47549b6f752a1be07fa1ba3037cb59c7d56 | https://github.com/aaugustin/websockets/blob/17b3f47549b6f752a1be07fa1ba3037cb59c7d56/src/websockets/protocol.py#L1097-L1156 | train | 214,188 |
aaugustin/websockets | src/websockets/protocol.py | WebSocketCommonProtocol.abort_keepalive_pings | def abort_keepalive_pings(self) -> None:
"""
Raise ConnectionClosed in pending keepalive pings.
They'll never receive a pong once the connection is closed.
"""
assert self.state is State.CLOSED
exc = ConnectionClosed(self.close_code, self.close_reason)
exc.__cause__ = self.transfer_data_exc # emulate raise ... from ...
for ping in self.pings.values():
ping.set_exception(exc)
if self.pings:
pings_hex = ", ".join(
binascii.hexlify(ping_id).decode() or "[empty]"
for ping_id in self.pings
)
plural = "s" if len(self.pings) > 1 else ""
logger.debug(
"%s - aborted pending ping%s: %s", self.side, plural, pings_hex
) | python | def abort_keepalive_pings(self) -> None:
"""
Raise ConnectionClosed in pending keepalive pings.
They'll never receive a pong once the connection is closed.
"""
assert self.state is State.CLOSED
exc = ConnectionClosed(self.close_code, self.close_reason)
exc.__cause__ = self.transfer_data_exc # emulate raise ... from ...
for ping in self.pings.values():
ping.set_exception(exc)
if self.pings:
pings_hex = ", ".join(
binascii.hexlify(ping_id).decode() or "[empty]"
for ping_id in self.pings
)
plural = "s" if len(self.pings) > 1 else ""
logger.debug(
"%s - aborted pending ping%s: %s", self.side, plural, pings_hex
) | [
"def",
"abort_keepalive_pings",
"(",
"self",
")",
"->",
"None",
":",
"assert",
"self",
".",
"state",
"is",
"State",
".",
"CLOSED",
"exc",
"=",
"ConnectionClosed",
"(",
"self",
".",
"close_code",
",",
"self",
".",
"close_reason",
")",
"exc",
".",
"__cause__... | Raise ConnectionClosed in pending keepalive pings.
They'll never receive a pong once the connection is closed. | [
"Raise",
"ConnectionClosed",
"in",
"pending",
"keepalive",
"pings",
"."
] | 17b3f47549b6f752a1be07fa1ba3037cb59c7d56 | https://github.com/aaugustin/websockets/blob/17b3f47549b6f752a1be07fa1ba3037cb59c7d56/src/websockets/protocol.py#L1158-L1180 | train | 214,189 |
aaugustin/websockets | src/websockets/protocol.py | WebSocketCommonProtocol.connection_made | def connection_made(self, transport: asyncio.BaseTransport) -> None:
"""
Configure write buffer limits.
The high-water limit is defined by ``self.write_limit``.
The low-water limit currently defaults to ``self.write_limit // 4`` in
:meth:`~asyncio.WriteTransport.set_write_buffer_limits`, which should
be all right for reasonable use cases of this library.
This is the earliest point where we can get hold of the transport,
which means it's the best point for configuring it.
"""
logger.debug("%s - event = connection_made(%s)", self.side, transport)
# mypy thinks transport is a BaseTransport, not a Transport.
transport.set_write_buffer_limits(self.write_limit) # type: ignore
super().connection_made(transport) | python | def connection_made(self, transport: asyncio.BaseTransport) -> None:
"""
Configure write buffer limits.
The high-water limit is defined by ``self.write_limit``.
The low-water limit currently defaults to ``self.write_limit // 4`` in
:meth:`~asyncio.WriteTransport.set_write_buffer_limits`, which should
be all right for reasonable use cases of this library.
This is the earliest point where we can get hold of the transport,
which means it's the best point for configuring it.
"""
logger.debug("%s - event = connection_made(%s)", self.side, transport)
# mypy thinks transport is a BaseTransport, not a Transport.
transport.set_write_buffer_limits(self.write_limit) # type: ignore
super().connection_made(transport) | [
"def",
"connection_made",
"(",
"self",
",",
"transport",
":",
"asyncio",
".",
"BaseTransport",
")",
"->",
"None",
":",
"logger",
".",
"debug",
"(",
"\"%s - event = connection_made(%s)\"",
",",
"self",
".",
"side",
",",
"transport",
")",
"# mypy thinks transport is... | Configure write buffer limits.
The high-water limit is defined by ``self.write_limit``.
The low-water limit currently defaults to ``self.write_limit // 4`` in
:meth:`~asyncio.WriteTransport.set_write_buffer_limits`, which should
be all right for reasonable use cases of this library.
This is the earliest point where we can get hold of the transport,
which means it's the best point for configuring it. | [
"Configure",
"write",
"buffer",
"limits",
"."
] | 17b3f47549b6f752a1be07fa1ba3037cb59c7d56 | https://github.com/aaugustin/websockets/blob/17b3f47549b6f752a1be07fa1ba3037cb59c7d56/src/websockets/protocol.py#L1184-L1201 | train | 214,190 |
aaugustin/websockets | src/websockets/protocol.py | WebSocketCommonProtocol.eof_received | def eof_received(self) -> bool:
"""
Close the transport after receiving EOF.
Since Python 3.5, `:meth:~StreamReaderProtocol.eof_received` returns
``True`` on non-TLS connections.
See http://bugs.python.org/issue24539 for more information.
This is inappropriate for websockets for at least three reasons:
1. The use case is to read data until EOF with self.reader.read(-1).
Since websockets is a TLV protocol, this never happens.
2. It doesn't work on TLS connections. A falsy value must be
returned to have the same behavior on TLS and plain connections.
3. The websockets protocol has its own closing handshake. Endpoints
close the TCP connection after sending a close frame.
As a consequence we revert to the previous, more useful behavior.
"""
logger.debug("%s - event = eof_received()", self.side)
super().eof_received()
return False | python | def eof_received(self) -> bool:
"""
Close the transport after receiving EOF.
Since Python 3.5, `:meth:~StreamReaderProtocol.eof_received` returns
``True`` on non-TLS connections.
See http://bugs.python.org/issue24539 for more information.
This is inappropriate for websockets for at least three reasons:
1. The use case is to read data until EOF with self.reader.read(-1).
Since websockets is a TLV protocol, this never happens.
2. It doesn't work on TLS connections. A falsy value must be
returned to have the same behavior on TLS and plain connections.
3. The websockets protocol has its own closing handshake. Endpoints
close the TCP connection after sending a close frame.
As a consequence we revert to the previous, more useful behavior.
"""
logger.debug("%s - event = eof_received()", self.side)
super().eof_received()
return False | [
"def",
"eof_received",
"(",
"self",
")",
"->",
"bool",
":",
"logger",
".",
"debug",
"(",
"\"%s - event = eof_received()\"",
",",
"self",
".",
"side",
")",
"super",
"(",
")",
".",
"eof_received",
"(",
")",
"return",
"False"
] | Close the transport after receiving EOF.
Since Python 3.5, `:meth:~StreamReaderProtocol.eof_received` returns
``True`` on non-TLS connections.
See http://bugs.python.org/issue24539 for more information.
This is inappropriate for websockets for at least three reasons:
1. The use case is to read data until EOF with self.reader.read(-1).
Since websockets is a TLV protocol, this never happens.
2. It doesn't work on TLS connections. A falsy value must be
returned to have the same behavior on TLS and plain connections.
3. The websockets protocol has its own closing handshake. Endpoints
close the TCP connection after sending a close frame.
As a consequence we revert to the previous, more useful behavior. | [
"Close",
"the",
"transport",
"after",
"receiving",
"EOF",
"."
] | 17b3f47549b6f752a1be07fa1ba3037cb59c7d56 | https://github.com/aaugustin/websockets/blob/17b3f47549b6f752a1be07fa1ba3037cb59c7d56/src/websockets/protocol.py#L1203-L1228 | train | 214,191 |
aaugustin/websockets | src/websockets/protocol.py | WebSocketCommonProtocol.connection_lost | def connection_lost(self, exc: Optional[Exception]) -> None:
"""
7.1.4. The WebSocket Connection is Closed.
"""
logger.debug("%s - event = connection_lost(%s)", self.side, exc)
self.state = State.CLOSED
logger.debug("%s - state = CLOSED", self.side)
if not hasattr(self, "close_code"):
self.close_code = 1006
if not hasattr(self, "close_reason"):
self.close_reason = ""
logger.debug(
"%s x code = %d, reason = %s",
self.side,
self.close_code,
self.close_reason or "[no reason]",
)
self.abort_keepalive_pings()
# If self.connection_lost_waiter isn't pending, that's a bug, because:
# - it's set only here in connection_lost() which is called only once;
# - it must never be canceled.
self.connection_lost_waiter.set_result(None)
super().connection_lost(exc) | python | def connection_lost(self, exc: Optional[Exception]) -> None:
"""
7.1.4. The WebSocket Connection is Closed.
"""
logger.debug("%s - event = connection_lost(%s)", self.side, exc)
self.state = State.CLOSED
logger.debug("%s - state = CLOSED", self.side)
if not hasattr(self, "close_code"):
self.close_code = 1006
if not hasattr(self, "close_reason"):
self.close_reason = ""
logger.debug(
"%s x code = %d, reason = %s",
self.side,
self.close_code,
self.close_reason or "[no reason]",
)
self.abort_keepalive_pings()
# If self.connection_lost_waiter isn't pending, that's a bug, because:
# - it's set only here in connection_lost() which is called only once;
# - it must never be canceled.
self.connection_lost_waiter.set_result(None)
super().connection_lost(exc) | [
"def",
"connection_lost",
"(",
"self",
",",
"exc",
":",
"Optional",
"[",
"Exception",
"]",
")",
"->",
"None",
":",
"logger",
".",
"debug",
"(",
"\"%s - event = connection_lost(%s)\"",
",",
"self",
".",
"side",
",",
"exc",
")",
"self",
".",
"state",
"=",
... | 7.1.4. The WebSocket Connection is Closed. | [
"7",
".",
"1",
".",
"4",
".",
"The",
"WebSocket",
"Connection",
"is",
"Closed",
"."
] | 17b3f47549b6f752a1be07fa1ba3037cb59c7d56 | https://github.com/aaugustin/websockets/blob/17b3f47549b6f752a1be07fa1ba3037cb59c7d56/src/websockets/protocol.py#L1230-L1253 | train | 214,192 |
aaugustin/websockets | src/websockets/handshake.py | build_request | def build_request(headers: Headers) -> str:
"""
Build a handshake request to send to the server.
Return the ``key`` which must be passed to :func:`check_response`.
"""
raw_key = bytes(random.getrandbits(8) for _ in range(16))
key = base64.b64encode(raw_key).decode()
headers["Upgrade"] = "websocket"
headers["Connection"] = "Upgrade"
headers["Sec-WebSocket-Key"] = key
headers["Sec-WebSocket-Version"] = "13"
return key | python | def build_request(headers: Headers) -> str:
"""
Build a handshake request to send to the server.
Return the ``key`` which must be passed to :func:`check_response`.
"""
raw_key = bytes(random.getrandbits(8) for _ in range(16))
key = base64.b64encode(raw_key).decode()
headers["Upgrade"] = "websocket"
headers["Connection"] = "Upgrade"
headers["Sec-WebSocket-Key"] = key
headers["Sec-WebSocket-Version"] = "13"
return key | [
"def",
"build_request",
"(",
"headers",
":",
"Headers",
")",
"->",
"str",
":",
"raw_key",
"=",
"bytes",
"(",
"random",
".",
"getrandbits",
"(",
"8",
")",
"for",
"_",
"in",
"range",
"(",
"16",
")",
")",
"key",
"=",
"base64",
".",
"b64encode",
"(",
"... | Build a handshake request to send to the server.
Return the ``key`` which must be passed to :func:`check_response`. | [
"Build",
"a",
"handshake",
"request",
"to",
"send",
"to",
"the",
"server",
"."
] | 17b3f47549b6f752a1be07fa1ba3037cb59c7d56 | https://github.com/aaugustin/websockets/blob/17b3f47549b6f752a1be07fa1ba3037cb59c7d56/src/websockets/handshake.py#L49-L62 | train | 214,193 |
aaugustin/websockets | src/websockets/handshake.py | check_request | def check_request(headers: Headers) -> str:
"""
Check a handshake request received from the client.
If the handshake is valid, this function returns the ``key`` which must be
passed to :func:`build_response`.
Otherwise it raises an :exc:`~websockets.exceptions.InvalidHandshake`
exception and the server must return an error like 400 Bad Request.
This function doesn't verify that the request is an HTTP/1.1 or higher GET
request and doesn't perform Host and Origin checks. These controls are
usually performed earlier in the HTTP request handling code. They're the
responsibility of the caller.
"""
connection = sum(
[parse_connection(value) for value in headers.get_all("Connection")], []
)
if not any(value.lower() == "upgrade" for value in connection):
raise InvalidUpgrade("Connection", ", ".join(connection))
upgrade = sum([parse_upgrade(value) for value in headers.get_all("Upgrade")], [])
# For compatibility with non-strict implementations, ignore case when
# checking the Upgrade header. It's supposed to be 'WebSocket'.
if not (len(upgrade) == 1 and upgrade[0].lower() == "websocket"):
raise InvalidUpgrade("Upgrade", ", ".join(upgrade))
try:
s_w_key = headers["Sec-WebSocket-Key"]
except KeyError:
raise InvalidHeader("Sec-WebSocket-Key")
except MultipleValuesError:
raise InvalidHeader(
"Sec-WebSocket-Key", "more than one Sec-WebSocket-Key header found"
)
try:
raw_key = base64.b64decode(s_w_key.encode(), validate=True)
except binascii.Error:
raise InvalidHeaderValue("Sec-WebSocket-Key", s_w_key)
if len(raw_key) != 16:
raise InvalidHeaderValue("Sec-WebSocket-Key", s_w_key)
try:
s_w_version = headers["Sec-WebSocket-Version"]
except KeyError:
raise InvalidHeader("Sec-WebSocket-Version")
except MultipleValuesError:
raise InvalidHeader(
"Sec-WebSocket-Version", "more than one Sec-WebSocket-Version header found"
)
if s_w_version != "13":
raise InvalidHeaderValue("Sec-WebSocket-Version", s_w_version)
return s_w_key | python | def check_request(headers: Headers) -> str:
"""
Check a handshake request received from the client.
If the handshake is valid, this function returns the ``key`` which must be
passed to :func:`build_response`.
Otherwise it raises an :exc:`~websockets.exceptions.InvalidHandshake`
exception and the server must return an error like 400 Bad Request.
This function doesn't verify that the request is an HTTP/1.1 or higher GET
request and doesn't perform Host and Origin checks. These controls are
usually performed earlier in the HTTP request handling code. They're the
responsibility of the caller.
"""
connection = sum(
[parse_connection(value) for value in headers.get_all("Connection")], []
)
if not any(value.lower() == "upgrade" for value in connection):
raise InvalidUpgrade("Connection", ", ".join(connection))
upgrade = sum([parse_upgrade(value) for value in headers.get_all("Upgrade")], [])
# For compatibility with non-strict implementations, ignore case when
# checking the Upgrade header. It's supposed to be 'WebSocket'.
if not (len(upgrade) == 1 and upgrade[0].lower() == "websocket"):
raise InvalidUpgrade("Upgrade", ", ".join(upgrade))
try:
s_w_key = headers["Sec-WebSocket-Key"]
except KeyError:
raise InvalidHeader("Sec-WebSocket-Key")
except MultipleValuesError:
raise InvalidHeader(
"Sec-WebSocket-Key", "more than one Sec-WebSocket-Key header found"
)
try:
raw_key = base64.b64decode(s_w_key.encode(), validate=True)
except binascii.Error:
raise InvalidHeaderValue("Sec-WebSocket-Key", s_w_key)
if len(raw_key) != 16:
raise InvalidHeaderValue("Sec-WebSocket-Key", s_w_key)
try:
s_w_version = headers["Sec-WebSocket-Version"]
except KeyError:
raise InvalidHeader("Sec-WebSocket-Version")
except MultipleValuesError:
raise InvalidHeader(
"Sec-WebSocket-Version", "more than one Sec-WebSocket-Version header found"
)
if s_w_version != "13":
raise InvalidHeaderValue("Sec-WebSocket-Version", s_w_version)
return s_w_key | [
"def",
"check_request",
"(",
"headers",
":",
"Headers",
")",
"->",
"str",
":",
"connection",
"=",
"sum",
"(",
"[",
"parse_connection",
"(",
"value",
")",
"for",
"value",
"in",
"headers",
".",
"get_all",
"(",
"\"Connection\"",
")",
"]",
",",
"[",
"]",
"... | Check a handshake request received from the client.
If the handshake is valid, this function returns the ``key`` which must be
passed to :func:`build_response`.
Otherwise it raises an :exc:`~websockets.exceptions.InvalidHandshake`
exception and the server must return an error like 400 Bad Request.
This function doesn't verify that the request is an HTTP/1.1 or higher GET
request and doesn't perform Host and Origin checks. These controls are
usually performed earlier in the HTTP request handling code. They're the
responsibility of the caller. | [
"Check",
"a",
"handshake",
"request",
"received",
"from",
"the",
"client",
"."
] | 17b3f47549b6f752a1be07fa1ba3037cb59c7d56 | https://github.com/aaugustin/websockets/blob/17b3f47549b6f752a1be07fa1ba3037cb59c7d56/src/websockets/handshake.py#L65-L123 | train | 214,194 |
aaugustin/websockets | src/websockets/handshake.py | build_response | def build_response(headers: Headers, key: str) -> None:
"""
Build a handshake response to send to the client.
``key`` comes from :func:`check_request`.
"""
headers["Upgrade"] = "websocket"
headers["Connection"] = "Upgrade"
headers["Sec-WebSocket-Accept"] = accept(key) | python | def build_response(headers: Headers, key: str) -> None:
"""
Build a handshake response to send to the client.
``key`` comes from :func:`check_request`.
"""
headers["Upgrade"] = "websocket"
headers["Connection"] = "Upgrade"
headers["Sec-WebSocket-Accept"] = accept(key) | [
"def",
"build_response",
"(",
"headers",
":",
"Headers",
",",
"key",
":",
"str",
")",
"->",
"None",
":",
"headers",
"[",
"\"Upgrade\"",
"]",
"=",
"\"websocket\"",
"headers",
"[",
"\"Connection\"",
"]",
"=",
"\"Upgrade\"",
"headers",
"[",
"\"Sec-WebSocket-Accep... | Build a handshake response to send to the client.
``key`` comes from :func:`check_request`. | [
"Build",
"a",
"handshake",
"response",
"to",
"send",
"to",
"the",
"client",
"."
] | 17b3f47549b6f752a1be07fa1ba3037cb59c7d56 | https://github.com/aaugustin/websockets/blob/17b3f47549b6f752a1be07fa1ba3037cb59c7d56/src/websockets/handshake.py#L126-L135 | train | 214,195 |
aaugustin/websockets | src/websockets/handshake.py | check_response | def check_response(headers: Headers, key: str) -> None:
"""
Check a handshake response received from the server.
``key`` comes from :func:`build_request`.
If the handshake is valid, this function returns ``None``.
Otherwise it raises an :exc:`~websockets.exceptions.InvalidHandshake`
exception.
This function doesn't verify that the response is an HTTP/1.1 or higher
response with a 101 status code. These controls are the responsibility of
the caller.
"""
connection = sum(
[parse_connection(value) for value in headers.get_all("Connection")], []
)
if not any(value.lower() == "upgrade" for value in connection):
raise InvalidUpgrade("Connection", " ".join(connection))
upgrade = sum([parse_upgrade(value) for value in headers.get_all("Upgrade")], [])
# For compatibility with non-strict implementations, ignore case when
# checking the Upgrade header. It's supposed to be 'WebSocket'.
if not (len(upgrade) == 1 and upgrade[0].lower() == "websocket"):
raise InvalidUpgrade("Upgrade", ", ".join(upgrade))
try:
s_w_accept = headers["Sec-WebSocket-Accept"]
except KeyError:
raise InvalidHeader("Sec-WebSocket-Accept")
except MultipleValuesError:
raise InvalidHeader(
"Sec-WebSocket-Accept", "more than one Sec-WebSocket-Accept header found"
)
if s_w_accept != accept(key):
raise InvalidHeaderValue("Sec-WebSocket-Accept", s_w_accept) | python | def check_response(headers: Headers, key: str) -> None:
"""
Check a handshake response received from the server.
``key`` comes from :func:`build_request`.
If the handshake is valid, this function returns ``None``.
Otherwise it raises an :exc:`~websockets.exceptions.InvalidHandshake`
exception.
This function doesn't verify that the response is an HTTP/1.1 or higher
response with a 101 status code. These controls are the responsibility of
the caller.
"""
connection = sum(
[parse_connection(value) for value in headers.get_all("Connection")], []
)
if not any(value.lower() == "upgrade" for value in connection):
raise InvalidUpgrade("Connection", " ".join(connection))
upgrade = sum([parse_upgrade(value) for value in headers.get_all("Upgrade")], [])
# For compatibility with non-strict implementations, ignore case when
# checking the Upgrade header. It's supposed to be 'WebSocket'.
if not (len(upgrade) == 1 and upgrade[0].lower() == "websocket"):
raise InvalidUpgrade("Upgrade", ", ".join(upgrade))
try:
s_w_accept = headers["Sec-WebSocket-Accept"]
except KeyError:
raise InvalidHeader("Sec-WebSocket-Accept")
except MultipleValuesError:
raise InvalidHeader(
"Sec-WebSocket-Accept", "more than one Sec-WebSocket-Accept header found"
)
if s_w_accept != accept(key):
raise InvalidHeaderValue("Sec-WebSocket-Accept", s_w_accept) | [
"def",
"check_response",
"(",
"headers",
":",
"Headers",
",",
"key",
":",
"str",
")",
"->",
"None",
":",
"connection",
"=",
"sum",
"(",
"[",
"parse_connection",
"(",
"value",
")",
"for",
"value",
"in",
"headers",
".",
"get_all",
"(",
"\"Connection\"",
")... | Check a handshake response received from the server.
``key`` comes from :func:`build_request`.
If the handshake is valid, this function returns ``None``.
Otherwise it raises an :exc:`~websockets.exceptions.InvalidHandshake`
exception.
This function doesn't verify that the response is an HTTP/1.1 or higher
response with a 101 status code. These controls are the responsibility of
the caller. | [
"Check",
"a",
"handshake",
"response",
"received",
"from",
"the",
"server",
"."
] | 17b3f47549b6f752a1be07fa1ba3037cb59c7d56 | https://github.com/aaugustin/websockets/blob/17b3f47549b6f752a1be07fa1ba3037cb59c7d56/src/websockets/handshake.py#L138-L178 | train | 214,196 |
aaugustin/websockets | src/websockets/extensions/permessage_deflate.py | PerMessageDeflate.decode | def decode(self, frame: Frame, *, max_size: Optional[int] = None) -> Frame:
"""
Decode an incoming frame.
"""
# Skip control frames.
if frame.opcode in CTRL_OPCODES:
return frame
# Handle continuation data frames:
# - skip if the initial data frame wasn't encoded
# - reset "decode continuation data" flag if it's a final frame
if frame.opcode == OP_CONT:
if not self.decode_cont_data:
return frame
if frame.fin:
self.decode_cont_data = False
# Handle text and binary data frames:
# - skip if the frame isn't encoded
# - set "decode continuation data" flag if it's a non-final frame
else:
if not frame.rsv1:
return frame
if not frame.fin: # frame.rsv1 is True at this point
self.decode_cont_data = True
# Re-initialize per-message decoder.
if self.remote_no_context_takeover:
self.decoder = zlib.decompressobj(wbits=-self.remote_max_window_bits)
# Uncompress compressed frames. Protect against zip bombs by
# preventing zlib from decompressing more than max_length bytes
# (except when the limit is disabled with max_size = None).
data = frame.data
if frame.fin:
data += _EMPTY_UNCOMPRESSED_BLOCK
max_length = 0 if max_size is None else max_size
data = self.decoder.decompress(data, max_length)
if self.decoder.unconsumed_tail:
raise PayloadTooBig(
f"Uncompressed payload length exceeds size limit (? > {max_size} bytes)"
)
# Allow garbage collection of the decoder if it won't be reused.
if frame.fin and self.remote_no_context_takeover:
del self.decoder
return frame._replace(data=data, rsv1=False) | python | def decode(self, frame: Frame, *, max_size: Optional[int] = None) -> Frame:
"""
Decode an incoming frame.
"""
# Skip control frames.
if frame.opcode in CTRL_OPCODES:
return frame
# Handle continuation data frames:
# - skip if the initial data frame wasn't encoded
# - reset "decode continuation data" flag if it's a final frame
if frame.opcode == OP_CONT:
if not self.decode_cont_data:
return frame
if frame.fin:
self.decode_cont_data = False
# Handle text and binary data frames:
# - skip if the frame isn't encoded
# - set "decode continuation data" flag if it's a non-final frame
else:
if not frame.rsv1:
return frame
if not frame.fin: # frame.rsv1 is True at this point
self.decode_cont_data = True
# Re-initialize per-message decoder.
if self.remote_no_context_takeover:
self.decoder = zlib.decompressobj(wbits=-self.remote_max_window_bits)
# Uncompress compressed frames. Protect against zip bombs by
# preventing zlib from decompressing more than max_length bytes
# (except when the limit is disabled with max_size = None).
data = frame.data
if frame.fin:
data += _EMPTY_UNCOMPRESSED_BLOCK
max_length = 0 if max_size is None else max_size
data = self.decoder.decompress(data, max_length)
if self.decoder.unconsumed_tail:
raise PayloadTooBig(
f"Uncompressed payload length exceeds size limit (? > {max_size} bytes)"
)
# Allow garbage collection of the decoder if it won't be reused.
if frame.fin and self.remote_no_context_takeover:
del self.decoder
return frame._replace(data=data, rsv1=False) | [
"def",
"decode",
"(",
"self",
",",
"frame",
":",
"Frame",
",",
"*",
",",
"max_size",
":",
"Optional",
"[",
"int",
"]",
"=",
"None",
")",
"->",
"Frame",
":",
"# Skip control frames.",
"if",
"frame",
".",
"opcode",
"in",
"CTRL_OPCODES",
":",
"return",
"f... | Decode an incoming frame. | [
"Decode",
"an",
"incoming",
"frame",
"."
] | 17b3f47549b6f752a1be07fa1ba3037cb59c7d56 | https://github.com/aaugustin/websockets/blob/17b3f47549b6f752a1be07fa1ba3037cb59c7d56/src/websockets/extensions/permessage_deflate.py#L91-L139 | train | 214,197 |
aaugustin/websockets | src/websockets/extensions/permessage_deflate.py | PerMessageDeflate.encode | def encode(self, frame: Frame) -> Frame:
"""
Encode an outgoing frame.
"""
# Skip control frames.
if frame.opcode in CTRL_OPCODES:
return frame
# Since we always encode and never fragment messages, there's no logic
# similar to decode() here at this time.
if frame.opcode != OP_CONT:
# Re-initialize per-message decoder.
if self.local_no_context_takeover:
self.encoder = zlib.compressobj(
wbits=-self.local_max_window_bits, **self.compress_settings
)
# Compress data frames.
data = self.encoder.compress(frame.data) + self.encoder.flush(zlib.Z_SYNC_FLUSH)
if frame.fin and data.endswith(_EMPTY_UNCOMPRESSED_BLOCK):
data = data[:-4]
# Allow garbage collection of the encoder if it won't be reused.
if frame.fin and self.local_no_context_takeover:
del self.encoder
return frame._replace(data=data, rsv1=True) | python | def encode(self, frame: Frame) -> Frame:
"""
Encode an outgoing frame.
"""
# Skip control frames.
if frame.opcode in CTRL_OPCODES:
return frame
# Since we always encode and never fragment messages, there's no logic
# similar to decode() here at this time.
if frame.opcode != OP_CONT:
# Re-initialize per-message decoder.
if self.local_no_context_takeover:
self.encoder = zlib.compressobj(
wbits=-self.local_max_window_bits, **self.compress_settings
)
# Compress data frames.
data = self.encoder.compress(frame.data) + self.encoder.flush(zlib.Z_SYNC_FLUSH)
if frame.fin and data.endswith(_EMPTY_UNCOMPRESSED_BLOCK):
data = data[:-4]
# Allow garbage collection of the encoder if it won't be reused.
if frame.fin and self.local_no_context_takeover:
del self.encoder
return frame._replace(data=data, rsv1=True) | [
"def",
"encode",
"(",
"self",
",",
"frame",
":",
"Frame",
")",
"->",
"Frame",
":",
"# Skip control frames.",
"if",
"frame",
".",
"opcode",
"in",
"CTRL_OPCODES",
":",
"return",
"frame",
"# Since we always encode and never fragment messages, there's no logic",
"# similar ... | Encode an outgoing frame. | [
"Encode",
"an",
"outgoing",
"frame",
"."
] | 17b3f47549b6f752a1be07fa1ba3037cb59c7d56 | https://github.com/aaugustin/websockets/blob/17b3f47549b6f752a1be07fa1ba3037cb59c7d56/src/websockets/extensions/permessage_deflate.py#L141-L169 | train | 214,198 |
aaugustin/websockets | src/websockets/extensions/permessage_deflate.py | ClientPerMessageDeflateFactory.get_request_params | def get_request_params(self) -> List[ExtensionParameter]:
"""
Build request parameters.
"""
return _build_parameters(
self.server_no_context_takeover,
self.client_no_context_takeover,
self.server_max_window_bits,
self.client_max_window_bits,
) | python | def get_request_params(self) -> List[ExtensionParameter]:
"""
Build request parameters.
"""
return _build_parameters(
self.server_no_context_takeover,
self.client_no_context_takeover,
self.server_max_window_bits,
self.client_max_window_bits,
) | [
"def",
"get_request_params",
"(",
"self",
")",
"->",
"List",
"[",
"ExtensionParameter",
"]",
":",
"return",
"_build_parameters",
"(",
"self",
".",
"server_no_context_takeover",
",",
"self",
".",
"client_no_context_takeover",
",",
"self",
".",
"server_max_window_bits",... | Build request parameters. | [
"Build",
"request",
"parameters",
"."
] | 17b3f47549b6f752a1be07fa1ba3037cb59c7d56 | https://github.com/aaugustin/websockets/blob/17b3f47549b6f752a1be07fa1ba3037cb59c7d56/src/websockets/extensions/permessage_deflate.py#L313-L323 | train | 214,199 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.