body
stringlengths 26
98.2k
| body_hash
int64 -9,222,864,604,528,158,000
9,221,803,474B
| docstring
stringlengths 1
16.8k
| path
stringlengths 5
230
| name
stringlengths 1
96
| repository_name
stringlengths 7
89
| lang
stringclasses 1
value | body_without_docstring
stringlengths 20
98.2k
|
|---|---|---|---|---|---|---|---|
def one_hot_spectrum(spec, tol, max_mz, shift=0, min_mz=0, method='max'):
    """Convert spectrum peaks into a one-hot vector.

    Parameters
    ----------
    spec : numpy array
        Peaks as rows of (mz, intensity).
    tol : float
        Bin width used to discretize the m/z axis.
    max_mz : float
        Upper m/z bound of the binned range.
    shift : float, optional
        Shift applied to peak m/z values before binning. Default is 0.
    min_mz : float, optional
        Lower m/z bound of the binned range. Default is 0.
    method : str
        'max' take highest intensity peak within every bin.
        'sum' take sum of all peaks within every bin.

    Returns
    -------
    numpy array
        Vector of length int((max_mz - min_mz) / tol) with one intensity
        per occupied bin.

    Raises
    ------
    ValueError
        If method is neither 'max' nor 'sum'.
    """
    dim_vector = int((max_mz - min_mz) / tol)
    one_hot_spec = np.zeros(dim_vector)
    # Bin index relative to min_mz. The previous implementation did not
    # subtract min_mz although the vector length accounts for it, which
    # mis-binned every peak whenever min_mz != 0.
    idx = ((spec[:, 0] + shift - min_mz) / tol).astype(int)
    # Out-of-range peaks are clamped into bin 0 (behaviour kept from the
    # original implementation; they are not discarded).
    idx[idx >= dim_vector] = 0
    idx[idx < 0] = 0
    if method == 'max':
        for bin_id in set(idx):
            one_hot_spec[bin_id] = np.max(spec[idx == bin_id, 1])
    elif method == 'sum':
        for bin_id in set(idx):
            one_hot_spec[bin_id] = np.sum(spec[idx == bin_id, 1])
    else:
        # Fail loudly instead of printing and returning an all-zero vector.
        raise ValueError("Method not known, use 'max' or 'sum'.")
    return one_hot_spec
| 1,814,090,381,247,568,000
|
Convert spectrum peaks into a one-hot vector
method: str
'max' take highest intensity peak within every bin.
'sum' take sum of all peaks within every bin.
|
matchms/old/ms_similarity_classical.py
|
one_hot_spectrum
|
matchms/old-iomega-spec2vec
|
python
|
def one_hot_spectrum(spec, tol, max_mz, shift=0, min_mz=0, method='max'):
    """Convert spectrum peaks into a one-hot vector.

    Parameters
    ----------
    spec : numpy array
        Peaks as rows of (mz, intensity).
    tol : float
        Bin width used to discretize the m/z axis.
    max_mz : float
        Upper m/z bound of the binned range.
    shift : float, optional
        Shift applied to peak m/z values before binning. Default is 0.
    min_mz : float, optional
        Lower m/z bound of the binned range. Default is 0.
    method : str
        'max' take highest intensity peak within every bin.
        'sum' take sum of all peaks within every bin.

    Returns
    -------
    numpy array
        Vector of length int((max_mz - min_mz) / tol) with one intensity
        per occupied bin.

    Raises
    ------
    ValueError
        If method is neither 'max' nor 'sum'.
    """
    dim_vector = int((max_mz - min_mz) / tol)
    one_hot_spec = np.zeros(dim_vector)
    # Bin index relative to min_mz. The previous implementation did not
    # subtract min_mz although the vector length accounts for it, which
    # mis-binned every peak whenever min_mz != 0.
    idx = ((spec[:, 0] + shift - min_mz) / tol).astype(int)
    # Out-of-range peaks are clamped into bin 0 (behaviour kept from the
    # original implementation; they are not discarded).
    idx[idx >= dim_vector] = 0
    idx[idx < 0] = 0
    if method == 'max':
        for bin_id in set(idx):
            one_hot_spec[bin_id] = np.max(spec[idx == bin_id, 1])
    elif method == 'sum':
        for bin_id in set(idx):
            one_hot_spec[bin_id] = np.sum(spec[idx == bin_id, 1])
    else:
        # Fail loudly instead of printing and returning an all-zero vector.
        raise ValueError("Method not known, use 'max' or 'sum'.")
    return one_hot_spec
|
@numba.njit
def find_pairs_numba(spec1, spec2, tol, shift=0):
    """Find matching peak pairs between two spectra.

    Args
    ----
    spec1 : numpy array
        Peaks as rows of (mz, intensity).
    spec2 : numpy array
        Peaks as rows of (mz, intensity).
    tol : float
        Tolerance. Peaks will be considered a match when < tol apart.
    shift : float, optional
        Shift spectra peaks by shift. The default is 0.

    Returns
    -------
    matching_pairs : list
        List of (index1, index2, intensity product) tuples.
    """
    matching_pairs = []
    for i in range(len(spec1)):
        intensity_i = spec1[i, 1]
        # Indices of all spec2 peaks within tol of the (shifted) spec1 peak.
        close = np.where(np.abs(spec2[:, 0] - spec1[i, 0] + shift) <= tol)[0]
        for j in close:
            matching_pairs.append((i, j, intensity_i * spec2[j][1]))
    return matching_pairs
| -585,652,530,292,401,300
|
Find matching pairs between two spectra.
Args
----
spec1 : list of tuples
List of (mz, intensity) tuples.
spec2 : list of tuples
List of (mz, intensity) tuples.
tol : float
Tolerance. Peaks will be considered a match when < tol apart.
shift : float, optional
Shift spectra peaks by shift. The default is 0.
Returns
-------
matching_pairs : list
List of found matching peaks.
|
matchms/old/ms_similarity_classical.py
|
find_pairs_numba
|
matchms/old-iomega-spec2vec
|
python
|
@numba.njit
def find_pairs_numba(spec1, spec2, tol, shift=0):
    """Find matching peak pairs between two spectra.

    Args
    ----
    spec1 : numpy array
        Peaks as rows of (mz, intensity).
    spec2 : numpy array
        Peaks as rows of (mz, intensity).
    tol : float
        Tolerance. Peaks will be considered a match when < tol apart.
    shift : float, optional
        Shift spectra peaks by shift. The default is 0.

    Returns
    -------
    matching_pairs : list
        List of (index1, index2, intensity product) tuples.
    """
    matching_pairs = []
    for i in range(len(spec1)):
        intensity_i = spec1[i, 1]
        # Indices of all spec2 peaks within tol of the (shifted) spec1 peak.
        close = np.where(np.abs(spec2[:, 0] - spec1[i, 0] + shift) <= tol)[0]
        for j in close:
            matching_pairs.append((i, j, intensity_i * spec2[j][1]))
    return matching_pairs
|
def find_pairs(spec1, spec2, tol, shift=0):
    """Find matching peak pairs between two spectra.

    Scans both m/z-sorted spectra once using a sliding lower bound.
    Returned indices refer to the peaks after sorting by m/z.

    Args
    ----
    spec1 : numpy array
        Peaks as rows of (mz, intensity).
    spec2 : numpy array
        Peaks as rows of (mz, intensity).
    tol : float
        Tolerance. Peaks will be considered a match when < tol apart.
    shift : float, optional
        Shift spectra peaks by shift. The default is 0.

    Returns
    -------
    matching_pairs : list
        List of (index1, index2, intensity product) tuples.
    """
    # Sort both spectra by m/z, with intensity as tie-breaker.
    spec1 = spec1[np.lexsort((spec1[:, 1], spec1[:, 0])), :]
    spec2 = spec2[np.lexsort((spec2[:, 1], spec2[:, 0])), :]

    matching_pairs = []
    low = 0
    n2 = len(spec2)
    for i in range(len(spec1)):
        mz_i = spec1[i, 0]
        intensity_i = spec1[i, 1]
        # Advance past spec2 peaks that fell below the tolerance window.
        while low < n2 and spec2[low][0] + shift < mz_i - tol:
            low += 1
        if low == n2:
            # Every remaining spec1 peak lies above all spec2 peaks.
            break
        j = low
        # Collect every spec2 peak inside the window around mz_i.
        while j < n2 and spec2[j][0] + shift < mz_i + tol:
            matching_pairs.append((i, j, intensity_i * spec2[j][1]))
            j += 1
    return matching_pairs
| 7,536,612,827,658,155,000
|
Find matching pairs between two spectra.
Args
----
spec1 : list of tuples
List of (mz, intensity) tuples.
spec2 : list of tuples
List of (mz, intensity) tuples.
tol : float
Tolerance. Peaks will be considered a match when < tol apart.
shift : float, optional
Shift spectra peaks by shift. The default is 0.
Returns
-------
matching_pairs : list
List of found matching peaks.
|
matchms/old/ms_similarity_classical.py
|
find_pairs
|
matchms/old-iomega-spec2vec
|
python
|
def find_pairs(spec1, spec2, tol, shift=0):
    """Find matching peak pairs between two spectra.

    Scans both m/z-sorted spectra once using a sliding lower bound.
    Returned indices refer to the peaks after sorting by m/z.

    Args
    ----
    spec1 : numpy array
        Peaks as rows of (mz, intensity).
    spec2 : numpy array
        Peaks as rows of (mz, intensity).
    tol : float
        Tolerance. Peaks will be considered a match when < tol apart.
    shift : float, optional
        Shift spectra peaks by shift. The default is 0.

    Returns
    -------
    matching_pairs : list
        List of (index1, index2, intensity product) tuples.
    """
    # Sort both spectra by m/z, with intensity as tie-breaker.
    spec1 = spec1[np.lexsort((spec1[:, 1], spec1[:, 0])), :]
    spec2 = spec2[np.lexsort((spec2[:, 1], spec2[:, 0])), :]

    matching_pairs = []
    low = 0
    n2 = len(spec2)
    for i in range(len(spec1)):
        mz_i = spec1[i, 0]
        intensity_i = spec1[i, 1]
        # Advance past spec2 peaks that fell below the tolerance window.
        while low < n2 and spec2[low][0] + shift < mz_i - tol:
            low += 1
        if low == n2:
            # Every remaining spec1 peak lies above all spec2 peaks.
            break
        j = low
        # Collect every spec2 peak inside the window around mz_i.
        while j < n2 and spec2[j][0] + shift < mz_i + tol:
            matching_pairs.append((i, j, intensity_i * spec2[j][1]))
            j += 1
    return matching_pairs
|
def test_luminance_Newhall1943(self):
    """
    Tests :func:`colour.colorimetry.luminance.luminance_Newhall1943`
    definition.
    """
    # (input value, expected luminance) reference pairs.
    for V, Y in [
        (4.08244375, 12.550078816731881),
        (5.39132685, 23.481252371310738),
        (2.97619312, 6.451426687560192),
    ]:
        self.assertAlmostEqual(luminance_Newhall1943(V), Y, places=7)
| -736,306,793,631,554,300
|
Tests :func:`colour.colorimetry.luminance.luminance_Newhall1943`
definition.
|
colour/colorimetry/tests/test_luminance.py
|
test_luminance_Newhall1943
|
colour-science/colour
|
python
|
def test_luminance_Newhall1943(self):
    """
    Tests :func:`colour.colorimetry.luminance.luminance_Newhall1943`
    definition.
    """
    # (input value, expected luminance) reference pairs.
    for V, Y in [
        (4.08244375, 12.550078816731881),
        (5.39132685, 23.481252371310738),
        (2.97619312, 6.451426687560192),
    ]:
        self.assertAlmostEqual(luminance_Newhall1943(V), Y, places=7)
|
def test_n_dimensional_luminance_Newhall1943(self):
    """
    Tests :func:`colour.colorimetry.luminance.luminance_Newhall1943`
    definition n-dimensional arrays support.
    """
    V = 4.08244375
    Y = luminance_Newhall1943(V)

    # Outputs must follow the input through 1-D, 2-D and 3-D shapes.
    for shape in [(6,), (2, 3), (2, 3, 1)]:
        V_a = np.reshape(np.tile(V, 6), shape)
        Y_a = np.reshape(np.tile(Y, 6), shape)
        np.testing.assert_almost_equal(
            luminance_Newhall1943(V_a), Y_a, decimal=7)
| 5,203,468,696,075,416,000
|
Tests :func:`colour.colorimetry.luminance.luminance_Newhall1943`
definition n-dimensional arrays support.
|
colour/colorimetry/tests/test_luminance.py
|
test_n_dimensional_luminance_Newhall1943
|
colour-science/colour
|
python
|
def test_n_dimensional_luminance_Newhall1943(self):
    """
    Tests :func:`colour.colorimetry.luminance.luminance_Newhall1943`
    definition n-dimensional arrays support.
    """
    V = 4.08244375
    Y = luminance_Newhall1943(V)

    # Outputs must follow the input through 1-D, 2-D and 3-D shapes.
    for shape in [(6,), (2, 3), (2, 3, 1)]:
        V_a = np.reshape(np.tile(V, 6), shape)
        Y_a = np.reshape(np.tile(Y, 6), shape)
        np.testing.assert_almost_equal(
            luminance_Newhall1943(V_a), Y_a, decimal=7)
|
def test_domain_range_scale_luminance_Newhall1943(self):
    """
    Tests :func:`colour.colorimetry.luminance.luminance_Newhall1943`
    definition domain and range scale support.
    """
    V = 4.08244375
    Y = luminance_Newhall1943(V)

    # (scale, domain factor, range factor) triples.
    for scale, domain_factor, range_factor in (
        ('reference', 1, 1),
        (1, 0.1, 0.01),
        (100, 10, 1),
    ):
        with domain_range_scale(scale):
            np.testing.assert_almost_equal(
                luminance_Newhall1943(V * domain_factor),
                Y * range_factor,
                decimal=7)
| 5,581,548,457,223,903,000
|
Tests :func:`colour.colorimetry.luminance.luminance_Newhall1943`
definition domain and range scale support.
|
colour/colorimetry/tests/test_luminance.py
|
test_domain_range_scale_luminance_Newhall1943
|
colour-science/colour
|
python
|
def test_domain_range_scale_luminance_Newhall1943(self):
    """
    Tests :func:`colour.colorimetry.luminance.luminance_Newhall1943`
    definition domain and range scale support.
    """
    V = 4.08244375
    Y = luminance_Newhall1943(V)

    # (scale, domain factor, range factor) triples.
    for scale, domain_factor, range_factor in (
        ('reference', 1, 1),
        (1, 0.1, 0.01),
        (100, 10, 1),
    ):
        with domain_range_scale(scale):
            np.testing.assert_almost_equal(
                luminance_Newhall1943(V * domain_factor),
                Y * range_factor,
                decimal=7)
|
@ignore_numpy_errors
def test_nan_luminance_Newhall1943(self):
    """
    Tests :func:`colour.colorimetry.luminance.luminance_Newhall1943`
    definition nan support.
    """
    # Must not raise on degenerate inputs (negatives, infinities, NaN).
    cases = np.array([-1.0, 0.0, 1.0, -np.inf, np.inf, np.nan])
    luminance_Newhall1943(cases)
| 6,877,471,335,887,002,000
|
Tests :func:`colour.colorimetry.luminance.luminance_Newhall1943`
definition nan support.
|
colour/colorimetry/tests/test_luminance.py
|
test_nan_luminance_Newhall1943
|
colour-science/colour
|
python
|
@ignore_numpy_errors
def test_nan_luminance_Newhall1943(self):
    """
    Tests :func:`colour.colorimetry.luminance.luminance_Newhall1943`
    definition nan support.
    """
    # Must not raise on degenerate inputs (negatives, infinities, NaN).
    cases = np.array([-1.0, 0.0, 1.0, -np.inf, np.inf, np.nan])
    luminance_Newhall1943(cases)
|
def test_luminance_ASTMD1535(self):
    """
    Tests :func:`colour.colorimetry.luminance.luminance_ASTMD1535`
    definition.
    """
    # (input value, expected luminance) reference pairs.
    for V, Y in [
        (4.08244375, 12.236342675366036),
        (5.39132685, 22.893999867280378),
        (2.97619312, 6.290225350905313),
    ]:
        self.assertAlmostEqual(luminance_ASTMD1535(V), Y, places=7)
| 3,155,344,833,414,839,000
|
Tests :func:`colour.colorimetry.luminance.luminance_ASTMD1535`
definition.
|
colour/colorimetry/tests/test_luminance.py
|
test_luminance_ASTMD1535
|
colour-science/colour
|
python
|
def test_luminance_ASTMD1535(self):
    """
    Tests :func:`colour.colorimetry.luminance.luminance_ASTMD1535`
    definition.
    """
    # (input value, expected luminance) reference pairs.
    for V, Y in [
        (4.08244375, 12.236342675366036),
        (5.39132685, 22.893999867280378),
        (2.97619312, 6.290225350905313),
    ]:
        self.assertAlmostEqual(luminance_ASTMD1535(V), Y, places=7)
|
def test_n_dimensional_luminance_ASTMD1535(self):
    """
    Tests :func:`colour.colorimetry.luminance.luminance_ASTMD1535`
    definition n-dimensional arrays support.
    """
    V = 4.08244375
    Y = luminance_ASTMD1535(V)

    # Outputs must follow the input through 1-D, 2-D and 3-D shapes.
    for shape in [(6,), (2, 3), (2, 3, 1)]:
        V_a = np.reshape(np.tile(V, 6), shape)
        Y_a = np.reshape(np.tile(Y, 6), shape)
        np.testing.assert_almost_equal(
            luminance_ASTMD1535(V_a), Y_a, decimal=7)
| -2,494,964,820,007,343,000
|
Tests :func:`colour.colorimetry.luminance.luminance_ASTMD1535`
definition n-dimensional arrays support.
|
colour/colorimetry/tests/test_luminance.py
|
test_n_dimensional_luminance_ASTMD1535
|
colour-science/colour
|
python
|
def test_n_dimensional_luminance_ASTMD1535(self):
    """
    Tests :func:`colour.colorimetry.luminance.luminance_ASTMD1535`
    definition n-dimensional arrays support.
    """
    V = 4.08244375
    Y = luminance_ASTMD1535(V)

    # Outputs must follow the input through 1-D, 2-D and 3-D shapes.
    for shape in [(6,), (2, 3), (2, 3, 1)]:
        V_a = np.reshape(np.tile(V, 6), shape)
        Y_a = np.reshape(np.tile(Y, 6), shape)
        np.testing.assert_almost_equal(
            luminance_ASTMD1535(V_a), Y_a, decimal=7)
|
def test_domain_range_scale_luminance_ASTMD1535(self):
    """
    Tests :func:`colour.colorimetry.luminance.luminance_ASTMD1535`
    definition domain and range scale support.
    """
    V = 4.08244375
    Y = luminance_ASTMD1535(V)

    # (scale, domain factor, range factor) triples.
    for scale, domain_factor, range_factor in (
        ('reference', 1, 1),
        (1, 0.1, 0.01),
        (100, 10, 1),
    ):
        with domain_range_scale(scale):
            np.testing.assert_almost_equal(
                luminance_ASTMD1535(V * domain_factor),
                Y * range_factor,
                decimal=7)
| 2,091,237,208,208,237,600
|
Tests :func:`colour.colorimetry.luminance.luminance_ASTMD1535`
definition domain and range scale support.
|
colour/colorimetry/tests/test_luminance.py
|
test_domain_range_scale_luminance_ASTMD1535
|
colour-science/colour
|
python
|
def test_domain_range_scale_luminance_ASTMD1535(self):
    """
    Tests :func:`colour.colorimetry.luminance.luminance_ASTMD1535`
    definition domain and range scale support.
    """
    V = 4.08244375
    Y = luminance_ASTMD1535(V)

    # (scale, domain factor, range factor) triples.
    for scale, domain_factor, range_factor in (
        ('reference', 1, 1),
        (1, 0.1, 0.01),
        (100, 10, 1),
    ):
        with domain_range_scale(scale):
            np.testing.assert_almost_equal(
                luminance_ASTMD1535(V * domain_factor),
                Y * range_factor,
                decimal=7)
|
@ignore_numpy_errors
def test_nan_luminance_ASTMD1535(self):
    """
    Tests :func:`colour.colorimetry.luminance.luminance_ASTMD1535`
    definition nan support.
    """
    # Must not raise on degenerate inputs (negatives, infinities, NaN).
    cases = np.array([-1.0, 0.0, 1.0, -np.inf, np.inf, np.nan])
    luminance_ASTMD1535(cases)
| -4,567,697,865,482,675,700
|
Tests :func:`colour.colorimetry.luminance.luminance_ASTMD1535`
definition nan support.
|
colour/colorimetry/tests/test_luminance.py
|
test_nan_luminance_ASTMD1535
|
colour-science/colour
|
python
|
@ignore_numpy_errors
def test_nan_luminance_ASTMD1535(self):
    """
    Tests :func:`colour.colorimetry.luminance.luminance_ASTMD1535`
    definition nan support.
    """
    # Must not raise on degenerate inputs (negatives, infinities, NaN).
    cases = np.array([-1.0, 0.0, 1.0, -np.inf, np.inf, np.nan])
    luminance_ASTMD1535(cases)
|
def test_intermediate_luminance_function_CIE1976(self):
    """
    Tests :func:`colour.colorimetry.luminance.\
intermediate_luminance_function_CIE1976` definition.
    """
    # (input value, expected luminance) reference pairs.
    for f_Y_Y_n, Y in [
        (0.495929964178047, 12.197225350000002),
        (0.613072093530391, 23.042767810000004),
        (0.394876333449113, 6.157200790000001),
    ]:
        self.assertAlmostEqual(
            intermediate_luminance_function_CIE1976(f_Y_Y_n), Y, places=7)
| -8,593,939,569,209,062,000
|
Tests :func:`colour.colorimetry.luminance.intermediate_luminance_function_CIE1976` definition.
|
colour/colorimetry/tests/test_luminance.py
|
test_intermediate_luminance_function_CIE1976
|
colour-science/colour
|
python
|
def test_intermediate_luminance_function_CIE1976(self):
    """
    Tests :func:`colour.colorimetry.luminance.\
intermediate_luminance_function_CIE1976` definition.
    """
    # (input value, expected luminance) reference pairs.
    for f_Y_Y_n, Y in [
        (0.495929964178047, 12.197225350000002),
        (0.613072093530391, 23.042767810000004),
        (0.394876333449113, 6.157200790000001),
    ]:
        self.assertAlmostEqual(
            intermediate_luminance_function_CIE1976(f_Y_Y_n), Y, places=7)
|
def test_n_dimensional_intermediate_luminance_function_CIE1976(self):
    """
    Tests :func:`colour.colorimetry.luminance.\
intermediate_luminance_function_CIE1976` definition n-dimensional arrays
    support.
    """
    f_Y_Y_n = 0.495929964178047
    Y = intermediate_luminance_function_CIE1976(f_Y_Y_n)

    # Outputs must follow the input through 1-D, 2-D and 3-D shapes.
    for shape in [(6,), (2, 3), (2, 3, 1)]:
        f_a = np.reshape(np.tile(f_Y_Y_n, 6), shape)
        Y_a = np.reshape(np.tile(Y, 6), shape)
        np.testing.assert_almost_equal(
            intermediate_luminance_function_CIE1976(f_a), Y_a, decimal=7)
| 2,127,797,802,917,290,200
|
Tests :func:`colour.colorimetry.luminance.intermediate_luminance_function_CIE1976` definition n-dimensional arrays
support.
|
colour/colorimetry/tests/test_luminance.py
|
test_n_dimensional_intermediate_luminance_function_CIE1976
|
colour-science/colour
|
python
|
def test_n_dimensional_intermediate_luminance_function_CIE1976(self):
    """
    Tests :func:`colour.colorimetry.luminance.\
intermediate_luminance_function_CIE1976` definition n-dimensional arrays
    support.
    """
    f_Y_Y_n = 0.495929964178047
    Y = intermediate_luminance_function_CIE1976(f_Y_Y_n)

    # Outputs must follow the input through 1-D, 2-D and 3-D shapes.
    for shape in [(6,), (2, 3), (2, 3, 1)]:
        f_a = np.reshape(np.tile(f_Y_Y_n, 6), shape)
        Y_a = np.reshape(np.tile(Y, 6), shape)
        np.testing.assert_almost_equal(
            intermediate_luminance_function_CIE1976(f_a), Y_a, decimal=7)
|
def test_domain_range_scale_intermediate_luminance_function_CIE1976(self):
    """
    Tests :func:`colour.colorimetry.luminance.\
intermediate_luminance_function_CIE1976` definition domain and range scale
    support.
    """
    f_Y_Y_n, Y_n = 41.52787584465345, 100
    Y = intermediate_luminance_function_CIE1976(f_Y_Y_n, Y_n)

    # The result must be scale-invariant for this definition.
    for scale in ('reference', 1, 100):
        with domain_range_scale(scale):
            np.testing.assert_almost_equal(
                intermediate_luminance_function_CIE1976(f_Y_Y_n, Y_n),
                Y,
                decimal=7)
| 3,596,112,416,605,083,600
|
Tests :func:`colour.colorimetry.luminance.intermediate_luminance_function_CIE1976` definition domain and range scale
support.
|
colour/colorimetry/tests/test_luminance.py
|
test_domain_range_scale_intermediate_luminance_function_CIE1976
|
colour-science/colour
|
python
|
def test_domain_range_scale_intermediate_luminance_function_CIE1976(self):
    """
    Tests :func:`colour.colorimetry.luminance.\
intermediate_luminance_function_CIE1976` definition domain and range scale
    support.
    """
    f_Y_Y_n, Y_n = 41.52787584465345, 100
    Y = intermediate_luminance_function_CIE1976(f_Y_Y_n, Y_n)

    # The result must be scale-invariant for this definition.
    for scale in ('reference', 1, 100):
        with domain_range_scale(scale):
            np.testing.assert_almost_equal(
                intermediate_luminance_function_CIE1976(f_Y_Y_n, Y_n),
                Y,
                decimal=7)
|
@ignore_numpy_errors
def test_nan_intermediate_luminance_function_CIE1976(self):
    """
    Tests :func:`colour.colorimetry.luminance.\
intermediate_luminance_function_CIE1976` definition nan support.
    """
    # Must not raise on degenerate inputs (negatives, infinities, NaN).
    cases = np.array([-1.0, 0.0, 1.0, -np.inf, np.inf, np.nan])
    intermediate_luminance_function_CIE1976(cases)
| 2,110,462,359,493,447,400
|
Tests :func:`colour.colorimetry.luminance.intermediate_luminance_function_CIE1976` definition nan support.
|
colour/colorimetry/tests/test_luminance.py
|
test_nan_intermediate_luminance_function_CIE1976
|
colour-science/colour
|
python
|
@ignore_numpy_errors
def test_nan_intermediate_luminance_function_CIE1976(self):
    """
    Tests :func:`colour.colorimetry.luminance.\
intermediate_luminance_function_CIE1976` definition nan support.
    """
    # Must not raise on degenerate inputs (negatives, infinities, NaN).
    cases = np.array([-1.0, 0.0, 1.0, -np.inf, np.inf, np.nan])
    intermediate_luminance_function_CIE1976(cases)
|
def test_luminance_CIE1976(self):
    """
    Tests :func:`colour.colorimetry.luminance.luminance_CIE1976`
    definition.
    """
    # ((arguments), expected luminance); the optional second argument is
    # forwarded unchanged.
    for args, Y in [
        ((41.52787584465345,), 12.197225350000002),
        ((55.1163628495254,), 23.042767810000004),
        ((29.805654680097106,), 6.157200790000001),
        ((56.480581732417676, 50), 12.197225349999998),
        ((47.317620274162735, 75), 12.197225350000002),
        ((42.51993072812094, 95), 12.197225350000005),
    ]:
        self.assertAlmostEqual(luminance_CIE1976(*args), Y, places=7)
| -1,002,988,854,156,116,400
|
Tests :func:`colour.colorimetry.luminance.luminance_CIE1976`
definition.
|
colour/colorimetry/tests/test_luminance.py
|
test_luminance_CIE1976
|
colour-science/colour
|
python
|
def test_luminance_CIE1976(self):
    """
    Tests :func:`colour.colorimetry.luminance.luminance_CIE1976`
    definition.
    """
    # ((arguments), expected luminance); the optional second argument is
    # forwarded unchanged.
    for args, Y in [
        ((41.52787584465345,), 12.197225350000002),
        ((55.1163628495254,), 23.042767810000004),
        ((29.805654680097106,), 6.157200790000001),
        ((56.480581732417676, 50), 12.197225349999998),
        ((47.317620274162735, 75), 12.197225350000002),
        ((42.51993072812094, 95), 12.197225350000005),
    ]:
        self.assertAlmostEqual(luminance_CIE1976(*args), Y, places=7)
|
def test_n_dimensional_luminance_CIE1976(self):
    """
    Tests :func:`colour.colorimetry.luminance.luminance_CIE1976`
    definition n-dimensional arrays support.
    """
    L_star = 41.52787584465345
    Y = luminance_CIE1976(L_star)

    # Outputs must follow the input through 1-D, 2-D and 3-D shapes.
    for shape in [(6,), (2, 3), (2, 3, 1)]:
        L_a = np.reshape(np.tile(L_star, 6), shape)
        Y_a = np.reshape(np.tile(Y, 6), shape)
        np.testing.assert_almost_equal(
            luminance_CIE1976(L_a), Y_a, decimal=7)
| 7,505,105,724,595,458,000
|
Tests :func:`colour.colorimetry.luminance.luminance_CIE1976`
definition n-dimensional arrays support.
|
colour/colorimetry/tests/test_luminance.py
|
test_n_dimensional_luminance_CIE1976
|
colour-science/colour
|
python
|
def test_n_dimensional_luminance_CIE1976(self):
    """
    Tests :func:`colour.colorimetry.luminance.luminance_CIE1976`
    definition n-dimensional arrays support.
    """
    L_star = 41.52787584465345
    Y = luminance_CIE1976(L_star)

    # Outputs must follow the input through 1-D, 2-D and 3-D shapes.
    for shape in [(6,), (2, 3), (2, 3, 1)]:
        L_a = np.reshape(np.tile(L_star, 6), shape)
        Y_a = np.reshape(np.tile(Y, 6), shape)
        np.testing.assert_almost_equal(
            luminance_CIE1976(L_a), Y_a, decimal=7)
|
def test_domain_range_scale_luminance_CIE1976(self):
    """
    Tests :func:`colour.colorimetry.luminance.luminance_CIE1976`
    definition domain and range scale support.
    """
    L_star, Y_n = 41.52787584465345, 100
    Y = luminance_CIE1976(L_star, Y_n)

    # (scale, common domain/range factor) pairs.
    for scale, k in (('reference', 1), (1, 0.01), (100, 1)):
        with domain_range_scale(scale):
            np.testing.assert_almost_equal(
                luminance_CIE1976(L_star * k, Y_n), Y * k, decimal=7)
| 4,783,930,150,008,546,000
|
Tests :func:`colour.colorimetry.luminance.luminance_CIE1976`
definition domain and range scale support.
|
colour/colorimetry/tests/test_luminance.py
|
test_domain_range_scale_luminance_CIE1976
|
colour-science/colour
|
python
|
def test_domain_range_scale_luminance_CIE1976(self):
    """
    Tests :func:`colour.colorimetry.luminance.luminance_CIE1976`
    definition domain and range scale support.
    """
    L_star, Y_n = 41.52787584465345, 100
    Y = luminance_CIE1976(L_star, Y_n)

    # (scale, common domain/range factor) pairs.
    for scale, k in (('reference', 1), (1, 0.01), (100, 1)):
        with domain_range_scale(scale):
            np.testing.assert_almost_equal(
                luminance_CIE1976(L_star * k, Y_n), Y * k, decimal=7)
|
@ignore_numpy_errors
def test_nan_luminance_CIE1976(self):
    """
    Tests :func:`colour.colorimetry.luminance.luminance_CIE1976`
    definition nan support.
    """
    # Must not raise on degenerate inputs (negatives, infinities, NaN).
    cases = np.array([-1.0, 0.0, 1.0, -np.inf, np.inf, np.nan])
    luminance_CIE1976(cases)
| -7,773,002,042,440,529,000
|
Tests :func:`colour.colorimetry.luminance.luminance_CIE1976`
definition nan support.
|
colour/colorimetry/tests/test_luminance.py
|
test_nan_luminance_CIE1976
|
colour-science/colour
|
python
|
@ignore_numpy_errors
def test_nan_luminance_CIE1976(self):
    """
    Tests :func:`colour.colorimetry.luminance.luminance_CIE1976`
    definition nan support.
    """
    # Must not raise on degenerate inputs (negatives, infinities, NaN).
    cases = np.array([-1.0, 0.0, 1.0, -np.inf, np.inf, np.nan])
    luminance_CIE1976(cases)
|
def test_luminance_Fairchild2010(self):
    """
    Tests :func:`colour.colorimetry.luminance.luminance_Fairchild2010`
    definition.
    """
    # ((arguments), expected luminance); the optional second argument is
    # forwarded unchanged.
    for args, Y in [
        ((31.996390226262736,), 0.12197225350000002),
        ((60.2031536827833,), 0.23042767809999998),
        ((11.83651724097649,), 0.06157200790000001),
        ((24.424283249379986, 2.75), 0.12197225350000002),
        ((100.01998632737424,), 1008.00000024),
        ((100.01999999709027,), 100799.92312466),
    ]:
        self.assertAlmostEqual(luminance_Fairchild2010(*args), Y, places=7)
| 1,798,259,972,739,209,000
|
Tests :func:`colour.colorimetry.luminance.luminance_Fairchild2010`
definition.
|
colour/colorimetry/tests/test_luminance.py
|
test_luminance_Fairchild2010
|
colour-science/colour
|
python
|
def test_luminance_Fairchild2010(self):
    """
    Tests :func:`colour.colorimetry.luminance.luminance_Fairchild2010`
    definition.
    """
    # ((arguments), expected luminance); the optional second argument is
    # forwarded unchanged.
    for args, Y in [
        ((31.996390226262736,), 0.12197225350000002),
        ((60.2031536827833,), 0.23042767809999998),
        ((11.83651724097649,), 0.06157200790000001),
        ((24.424283249379986, 2.75), 0.12197225350000002),
        ((100.01998632737424,), 1008.00000024),
        ((100.01999999709027,), 100799.92312466),
    ]:
        self.assertAlmostEqual(luminance_Fairchild2010(*args), Y, places=7)
|
def test_n_dimensional_luminance_Fairchild2010(self):
    """
    Tests :func:`colour.colorimetry.luminance.luminance_Fairchild2010`
    definition n-dimensional arrays support.
    """
    L_hdr = 31.996390226262736
    Y = luminance_Fairchild2010(L_hdr)

    # Outputs must follow the input through 1-D, 2-D and 3-D shapes.
    for shape in [(6,), (2, 3), (2, 3, 1)]:
        L_a = np.reshape(np.tile(L_hdr, 6), shape)
        Y_a = np.reshape(np.tile(Y, 6), shape)
        np.testing.assert_almost_equal(
            luminance_Fairchild2010(L_a), Y_a, decimal=7)
| 2,102,488,940,524,924,200
|
Tests :func:`colour.colorimetry.luminance.luminance_Fairchild2010`
definition n-dimensional arrays support.
|
colour/colorimetry/tests/test_luminance.py
|
test_n_dimensional_luminance_Fairchild2010
|
colour-science/colour
|
python
|
def test_n_dimensional_luminance_Fairchild2010(self):
    """
    Tests :func:`colour.colorimetry.luminance.luminance_Fairchild2010`
    definition n-dimensional arrays support.
    """
    L_hdr = 31.996390226262736
    Y = luminance_Fairchild2010(L_hdr)

    # Outputs must follow the input through 1-D, 2-D and 3-D shapes.
    for shape in [(6,), (2, 3), (2, 3, 1)]:
        L_a = np.reshape(np.tile(L_hdr, 6), shape)
        Y_a = np.reshape(np.tile(Y, 6), shape)
        np.testing.assert_almost_equal(
            luminance_Fairchild2010(L_a), Y_a, decimal=7)
|
def test_domain_range_scale_luminance_Fairchild2010(self):
    """
    Tests :func:`colour.colorimetry.luminance.luminance_Fairchild2010`
    definition domain and range scale support.
    """
    L_hdr = 31.996390226262736
    Y = luminance_Fairchild2010(L_hdr)

    # (scale, domain factor, range factor) triples.
    for scale, domain_factor, range_factor in (
        ('reference', 1, 1),
        (1, 0.01, 1),
        (100, 1, 100),
    ):
        with domain_range_scale(scale):
            np.testing.assert_almost_equal(
                luminance_Fairchild2010(L_hdr * domain_factor),
                Y * range_factor,
                decimal=7)
| 809,370,376,535,667,800
|
Tests :func:`colour.colorimetry.luminance.luminance_Fairchild2010`
definition domain and range scale support.
|
colour/colorimetry/tests/test_luminance.py
|
test_domain_range_scale_luminance_Fairchild2010
|
colour-science/colour
|
python
|
def test_domain_range_scale_luminance_Fairchild2010(self):
    """
    Tests :func:`colour.colorimetry.luminance.luminance_Fairchild2010`
    definition domain and range scale support.
    """
    L_hdr = 31.996390226262736
    Y = luminance_Fairchild2010(L_hdr)

    # (scale, domain factor, range factor) triples.
    for scale, domain_factor, range_factor in (
        ('reference', 1, 1),
        (1, 0.01, 1),
        (100, 1, 100),
    ):
        with domain_range_scale(scale):
            np.testing.assert_almost_equal(
                luminance_Fairchild2010(L_hdr * domain_factor),
                Y * range_factor,
                decimal=7)
|
@ignore_numpy_errors
def test_nan_luminance_Fairchild2010(self):
    """
    Tests :func:`colour.colorimetry.luminance.luminance_Fairchild2010`
    definition nan support.
    """
    # Must not raise on degenerate inputs (negatives, infinities, NaN).
    cases = np.array([-1.0, 0.0, 1.0, -np.inf, np.inf, np.nan])
    luminance_Fairchild2010(cases)
| -6,002,254,242,590,914,000
|
Tests :func:`colour.colorimetry.luminance.luminance_Fairchild2010`
definition nan support.
|
colour/colorimetry/tests/test_luminance.py
|
test_nan_luminance_Fairchild2010
|
colour-science/colour
|
python
|
@ignore_numpy_errors
def test_nan_luminance_Fairchild2010(self):
    """
    Tests :func:`colour.colorimetry.luminance.luminance_Fairchild2010`
    definition nan support.
    """
    # Must not raise on degenerate inputs (negatives, infinities, NaN).
    cases = np.array([-1.0, 0.0, 1.0, -np.inf, np.inf, np.nan])
    luminance_Fairchild2010(cases)
|
def test_luminance_Fairchild2011(self):
    """
    Tests :func:`colour.colorimetry.luminance.luminance_Fairchild2011`
    definition.
    """
    # ((arguments), expected luminance); the optional second argument is
    # forwarded unchanged.
    for args, Y in [
        ((51.852958445912506,), 0.12197225350000007),
        ((65.27520795635385,), 0.23042767809999998),
        ((39.81893551071592,), 0.06157200790000004),
        ((0.13268968410139345, 2.75), 0.12197225350000002),
        ((234.72925681957565,), 1008.0),
        ((245.57059778237573,), 100800.0),
    ]:
        self.assertAlmostEqual(luminance_Fairchild2011(*args), Y, places=7)
| -6,578,291,207,390,475,000
|
Tests :func:`colour.colorimetry.luminance.luminance_Fairchild2011`
definition.
|
colour/colorimetry/tests/test_luminance.py
|
test_luminance_Fairchild2011
|
colour-science/colour
|
python
|
def test_luminance_Fairchild2011(self):
    """
    Tests :func:`colour.colorimetry.luminance.luminance_Fairchild2011`
    definition.
    """
    # ((arguments), expected luminance); the optional second argument is
    # forwarded unchanged.
    for args, Y in [
        ((51.852958445912506,), 0.12197225350000007),
        ((65.27520795635385,), 0.23042767809999998),
        ((39.81893551071592,), 0.06157200790000004),
        ((0.13268968410139345, 2.75), 0.12197225350000002),
        ((234.72925681957565,), 1008.0),
        ((245.57059778237573,), 100800.0),
    ]:
        self.assertAlmostEqual(luminance_Fairchild2011(*args), Y, places=7)
|
def test_n_dimensional_luminance_Fairchild2011(self):
'\n Tests :func:`colour.colorimetry.luminance.luminance_Fairchild2011`\n definition n-dimensional arrays support.\n '
L_hdr = 51.852958445912506
Y = luminance_Fairchild2011(L_hdr)
L_hdr = np.tile(L_hdr, 6)
Y = np.tile(Y, 6)
np.testing.assert_almost_equal(luminance_Fairchild2011(L_hdr), Y, decimal=7)
L_hdr = np.reshape(L_hdr, (2, 3))
Y = np.reshape(Y, (2, 3))
np.testing.assert_almost_equal(luminance_Fairchild2011(L_hdr), Y, decimal=7)
L_hdr = np.reshape(L_hdr, (2, 3, 1))
Y = np.reshape(Y, (2, 3, 1))
np.testing.assert_almost_equal(luminance_Fairchild2011(L_hdr), Y, decimal=7)
| 6,717,249,832,600,971,000
|
Tests :func:`colour.colorimetry.luminance.luminance_Fairchild2011`
definition n-dimensional arrays support.
|
colour/colorimetry/tests/test_luminance.py
|
test_n_dimensional_luminance_Fairchild2011
|
colour-science/colour
|
python
|
def test_n_dimensional_luminance_Fairchild2011(self):
'\n Tests :func:`colour.colorimetry.luminance.luminance_Fairchild2011`\n definition n-dimensional arrays support.\n '
L_hdr = 51.852958445912506
Y = luminance_Fairchild2011(L_hdr)
L_hdr = np.tile(L_hdr, 6)
Y = np.tile(Y, 6)
np.testing.assert_almost_equal(luminance_Fairchild2011(L_hdr), Y, decimal=7)
L_hdr = np.reshape(L_hdr, (2, 3))
Y = np.reshape(Y, (2, 3))
np.testing.assert_almost_equal(luminance_Fairchild2011(L_hdr), Y, decimal=7)
L_hdr = np.reshape(L_hdr, (2, 3, 1))
Y = np.reshape(Y, (2, 3, 1))
np.testing.assert_almost_equal(luminance_Fairchild2011(L_hdr), Y, decimal=7)
|
def test_domain_range_scale_luminance_Fairchild2011(self):
'\n Tests :func:`colour.colorimetry.luminance.luminance_Fairchild2011`\n definition domain and range scale support.\n '
Y = luminance_Fairchild2011(26.459509817572265)
d_r = (('reference', 1, 1), (1, 0.01, 1), (100, 1, 100))
for (scale, factor_a, factor_b) in d_r:
with domain_range_scale(scale):
np.testing.assert_almost_equal(luminance_Fairchild2011((26.459509817572265 * factor_a)), (Y * factor_b), decimal=7)
| -2,623,130,738,909,725,000
|
Tests :func:`colour.colorimetry.luminance.luminance_Fairchild2011`
definition domain and range scale support.
|
colour/colorimetry/tests/test_luminance.py
|
test_domain_range_scale_luminance_Fairchild2011
|
colour-science/colour
|
python
|
def test_domain_range_scale_luminance_Fairchild2011(self):
'\n Tests :func:`colour.colorimetry.luminance.luminance_Fairchild2011`\n definition domain and range scale support.\n '
Y = luminance_Fairchild2011(26.459509817572265)
d_r = (('reference', 1, 1), (1, 0.01, 1), (100, 1, 100))
for (scale, factor_a, factor_b) in d_r:
with domain_range_scale(scale):
np.testing.assert_almost_equal(luminance_Fairchild2011((26.459509817572265 * factor_a)), (Y * factor_b), decimal=7)
|
@ignore_numpy_errors
def test_nan_luminance_Fairchild2011(self):
'\n Tests :func:`colour.colorimetry.luminance.luminance_Fairchild2011`\n definition nan support.\n '
luminance_Fairchild2011(np.array([(- 1.0), 0.0, 1.0, (- np.inf), np.inf, np.nan]))
| 1,113,389,581,187,477,100
|
Tests :func:`colour.colorimetry.luminance.luminance_Fairchild2011`
definition nan support.
|
colour/colorimetry/tests/test_luminance.py
|
test_nan_luminance_Fairchild2011
|
colour-science/colour
|
python
|
@ignore_numpy_errors
def test_nan_luminance_Fairchild2011(self):
'\n Tests :func:`colour.colorimetry.luminance.luminance_Fairchild2011`\n definition nan support.\n '
luminance_Fairchild2011(np.array([(- 1.0), 0.0, 1.0, (- np.inf), np.inf, np.nan]))
|
def test_luminance_Abebe2017(self):
'\n Tests :func:`colour.colorimetry.luminance.luminance_Abebe2017`\n definition.\n '
self.assertAlmostEqual(luminance_Abebe2017(0.486955571109229), 12.197225350000004, places=7)
self.assertAlmostEqual(luminance_Abebe2017(0.474544792145434, method='Stevens'), 12.197225350000025, places=7)
self.assertAlmostEqual(luminance_Abebe2017(0.286847428534793, 1000), 12.197225350000046, places=7)
self.assertAlmostEqual(luminance_Abebe2017(0.192145492588158, 4000), 12.19722535000012, places=7)
self.assertAlmostEqual(luminance_Abebe2017(0.170365211220992, 4000, method='Stevens'), 12.197225349999933, places=7)
| -1,243,341,332,770,816,800
|
Tests :func:`colour.colorimetry.luminance.luminance_Abebe2017`
definition.
|
colour/colorimetry/tests/test_luminance.py
|
test_luminance_Abebe2017
|
colour-science/colour
|
python
|
def test_luminance_Abebe2017(self):
'\n Tests :func:`colour.colorimetry.luminance.luminance_Abebe2017`\n definition.\n '
self.assertAlmostEqual(luminance_Abebe2017(0.486955571109229), 12.197225350000004, places=7)
self.assertAlmostEqual(luminance_Abebe2017(0.474544792145434, method='Stevens'), 12.197225350000025, places=7)
self.assertAlmostEqual(luminance_Abebe2017(0.286847428534793, 1000), 12.197225350000046, places=7)
self.assertAlmostEqual(luminance_Abebe2017(0.192145492588158, 4000), 12.19722535000012, places=7)
self.assertAlmostEqual(luminance_Abebe2017(0.170365211220992, 4000, method='Stevens'), 12.197225349999933, places=7)
|
def test_n_dimensional_luminance_Abebe2017(self):
'\n Tests :func:`colour.colorimetry.luminance.luminance_Abebe2017`\n definition n-dimensional arrays support.\n '
L = 0.486955571109229
Y = luminance_Abebe2017(L)
L = np.tile(L, 6)
Y = np.tile(Y, 6)
np.testing.assert_almost_equal(luminance_Abebe2017(L), Y, decimal=7)
L = np.reshape(L, (2, 3))
Y = np.reshape(Y, (2, 3))
np.testing.assert_almost_equal(luminance_Abebe2017(L), Y, decimal=7)
L = np.reshape(L, (2, 3, 1))
Y = np.reshape(Y, (2, 3, 1))
np.testing.assert_almost_equal(luminance_Abebe2017(L), Y, decimal=7)
| -34,476,846,152,246,280
|
Tests :func:`colour.colorimetry.luminance.luminance_Abebe2017`
definition n-dimensional arrays support.
|
colour/colorimetry/tests/test_luminance.py
|
test_n_dimensional_luminance_Abebe2017
|
colour-science/colour
|
python
|
def test_n_dimensional_luminance_Abebe2017(self):
'\n Tests :func:`colour.colorimetry.luminance.luminance_Abebe2017`\n definition n-dimensional arrays support.\n '
L = 0.486955571109229
Y = luminance_Abebe2017(L)
L = np.tile(L, 6)
Y = np.tile(Y, 6)
np.testing.assert_almost_equal(luminance_Abebe2017(L), Y, decimal=7)
L = np.reshape(L, (2, 3))
Y = np.reshape(Y, (2, 3))
np.testing.assert_almost_equal(luminance_Abebe2017(L), Y, decimal=7)
L = np.reshape(L, (2, 3, 1))
Y = np.reshape(Y, (2, 3, 1))
np.testing.assert_almost_equal(luminance_Abebe2017(L), Y, decimal=7)
|
def test_domain_range_scale_luminance_Abebe2017(self):
'\n Tests :func:`colour.colorimetry.luminance.luminance_Abebe2017`\n definition domain and range scale support.\n '
L = luminance_Abebe2017(0.486955571109229)
d_r = (('reference', 1), (1, 1), (100, 1))
for (scale, factor) in d_r:
with domain_range_scale(scale):
np.testing.assert_almost_equal(luminance_Abebe2017((0.486955571109229 * factor), (100 * factor)), (L * factor), decimal=7)
| -7,023,710,118,789,896,000
|
Tests :func:`colour.colorimetry.luminance.luminance_Abebe2017`
definition domain and range scale support.
|
colour/colorimetry/tests/test_luminance.py
|
test_domain_range_scale_luminance_Abebe2017
|
colour-science/colour
|
python
|
def test_domain_range_scale_luminance_Abebe2017(self):
'\n Tests :func:`colour.colorimetry.luminance.luminance_Abebe2017`\n definition domain and range scale support.\n '
L = luminance_Abebe2017(0.486955571109229)
d_r = (('reference', 1), (1, 1), (100, 1))
for (scale, factor) in d_r:
with domain_range_scale(scale):
np.testing.assert_almost_equal(luminance_Abebe2017((0.486955571109229 * factor), (100 * factor)), (L * factor), decimal=7)
|
@ignore_numpy_errors
def test_nan_luminance_Abebe2017(self):
'\n Tests :func:`colour.colorimetry.luminance.luminance_Abebe2017`\n definition nan support.\n '
luminance_Abebe2017(*([np.array([(- 1.0), 0.0, 1.0, (- np.inf), np.inf, np.nan])] * 2))
| -9,052,423,624,516,112,000
|
Tests :func:`colour.colorimetry.luminance.luminance_Abebe2017`
definition nan support.
|
colour/colorimetry/tests/test_luminance.py
|
test_nan_luminance_Abebe2017
|
colour-science/colour
|
python
|
@ignore_numpy_errors
def test_nan_luminance_Abebe2017(self):
'\n Tests :func:`colour.colorimetry.luminance.luminance_Abebe2017`\n definition nan support.\n '
luminance_Abebe2017(*([np.array([(- 1.0), 0.0, 1.0, (- np.inf), np.inf, np.nan])] * 2))
|
def test_domain_range_scale_luminance(self):
'\n Tests :func:`colour.colorimetry.luminance.luminance` definition\n domain and range scale support.\n '
m = ('Newhall 1943', 'ASTM D1535', 'CIE 1976', 'Fairchild 2010', 'Fairchild 2011', 'Abebe 2017')
v = [luminance(41.52787584465345, method, Y_n=100) for method in m]
d_r = (('reference', 1), (1, 0.01), (100, 1))
for (method, value) in zip(m, v):
for (scale, factor) in d_r:
with domain_range_scale(scale):
np.testing.assert_almost_equal(luminance((41.52787584465345 * factor), method, Y_n=100), (value * factor), decimal=7)
| 1,611,610,559,232,723,500
|
Tests :func:`colour.colorimetry.luminance.luminance` definition
domain and range scale support.
|
colour/colorimetry/tests/test_luminance.py
|
test_domain_range_scale_luminance
|
colour-science/colour
|
python
|
def test_domain_range_scale_luminance(self):
'\n Tests :func:`colour.colorimetry.luminance.luminance` definition\n domain and range scale support.\n '
m = ('Newhall 1943', 'ASTM D1535', 'CIE 1976', 'Fairchild 2010', 'Fairchild 2011', 'Abebe 2017')
v = [luminance(41.52787584465345, method, Y_n=100) for method in m]
d_r = (('reference', 1), (1, 0.01), (100, 1))
for (method, value) in zip(m, v):
for (scale, factor) in d_r:
with domain_range_scale(scale):
np.testing.assert_almost_equal(luminance((41.52787584465345 * factor), method, Y_n=100), (value * factor), decimal=7)
|
async def cb_nr_subscription_handler(msg: nats.aio.client.Msg):
'Use Callback to process Queue Msg objects.'
try:
logger.info('Received raw message seq:%s, data= %s', msg.sequence, msg.data.decode())
event_message = json.loads(msg.data.decode('utf-8'))
logger.debug('Event Message Received: %s', event_message)
(await process_event(event_message, FLASK_APP))
except Exception:
logger.error('Queue Error: %s', json.dumps(event_message), exc_info=True)
| -2,828,123,328,637,019,600
|
Use Callback to process Queue Msg objects.
|
queue_services/business-events-listener/src/business_events_listener/worker.py
|
cb_nr_subscription_handler
|
saravanpa-aot/sbc-auth
|
python
|
async def cb_nr_subscription_handler(msg: nats.aio.client.Msg):
try:
logger.info('Received raw message seq:%s, data= %s', msg.sequence, msg.data.decode())
event_message = json.loads(msg.data.decode('utf-8'))
logger.debug('Event Message Received: %s', event_message)
(await process_event(event_message, FLASK_APP))
except Exception:
logger.error('Queue Error: %s', json.dumps(event_message), exc_info=True)
|
async def process_event(event_message, flask_app):
'Render the org status.'
if (not flask_app):
raise QueueException('Flask App not available.')
with flask_app.app_context():
message_type = event_message.get('type', None)
if (message_type == 'bc.registry.names.events'):
(await process_name_events(event_message))
| 1,129,332,888,272,081,300
|
Render the org status.
|
queue_services/business-events-listener/src/business_events_listener/worker.py
|
process_event
|
saravanpa-aot/sbc-auth
|
python
|
async def process_event(event_message, flask_app):
if (not flask_app):
raise QueueException('Flask App not available.')
with flask_app.app_context():
message_type = event_message.get('type', None)
if (message_type == 'bc.registry.names.events'):
(await process_name_events(event_message))
|
async def process_name_events(event_message: Dict[(str, any)]):
"Process name events.\n\n 1. Check if the NR already exists in entities table, if yes apply changes. If not create entity record.\n 2. Check if new status is DRAFT, if yes call pay-api and get the account details for the payments against the NR.\n 3. If an account is found, affiliate to that account.\n\n Args:\n event_message (object): cloud event message, sample below.\n {\n 'specversion': '1.0.1',\n 'type': 'bc.registry.names.events',\n 'source': '/requests/6724165',\n 'id': id,\n 'time': '',\n 'datacontenttype': 'application/json',\n 'identifier': '781020202',\n 'data': {\n 'request': {\n 'nrNum': 'NR 5659951',\n 'newState': 'APPROVED',\n 'previousState': 'DRAFT'\n }\n }\n }\n "
logger.debug('>>>>>>>process_name_events>>>>>')
request_data = event_message.get('data').get('request')
nr_number = request_data['nrNum']
nr_status = request_data['newState']
nr_entity = EntityModel.find_by_business_identifier(nr_number)
if (nr_entity is None):
logger.info('Entity doesnt exist, creating a new entity.')
nr_entity = EntityModel(business_identifier=nr_number, corp_type_code=CorpType.NR.value)
nr_entity.status = nr_status
nr_entity.name = request_data.get('name', '')
nr_entity.last_modified_by = None
nr_entity.last_modified = parser.parse(event_message.get('time'))
if ((nr_status == 'DRAFT') and (AffiliationModel.find_affiliations_by_business_identifier(nr_number) is None)):
logger.info('Status is DRAFT, getting invoices for account')
invoices = RestService.get(f'{APP_CONFIG.PAY_API_URL}/payment-requests?businessIdentifier={nr_number}', token=RestService.get_service_account_token()).json()
if (invoices and (auth_account_id := invoices['invoices'][0].get('paymentAccount').get('accountId')) and str(auth_account_id).isnumeric()):
logger.info('Account ID received : %s', auth_account_id)
org: OrgModel = db.session.query(OrgModel).filter((OrgModel.id == auth_account_id)).one_or_none()
if org:
nr_entity.pass_code_claimed = True
logger.info('Creating affiliation between Entity : %s and Org : %s', nr_entity, org)
affiliation: AffiliationModel = AffiliationModel(entity=nr_entity, org=org)
affiliation.flush()
nr_entity.save()
logger.debug('<<<<<<<process_name_events<<<<<<<<<<')
| 162,389,578,599,781,470
|
Process name events.
1. Check if the NR already exists in entities table, if yes apply changes. If not create entity record.
2. Check if new status is DRAFT, if yes call pay-api and get the account details for the payments against the NR.
3. If an account is found, affiliate to that account.
Args:
event_message (object): cloud event message, sample below.
{
'specversion': '1.0.1',
'type': 'bc.registry.names.events',
'source': '/requests/6724165',
'id': id,
'time': '',
'datacontenttype': 'application/json',
'identifier': '781020202',
'data': {
'request': {
'nrNum': 'NR 5659951',
'newState': 'APPROVED',
'previousState': 'DRAFT'
}
}
}
|
queue_services/business-events-listener/src/business_events_listener/worker.py
|
process_name_events
|
saravanpa-aot/sbc-auth
|
python
|
async def process_name_events(event_message: Dict[(str, any)]):
"Process name events.\n\n 1. Check if the NR already exists in entities table, if yes apply changes. If not create entity record.\n 2. Check if new status is DRAFT, if yes call pay-api and get the account details for the payments against the NR.\n 3. If an account is found, affiliate to that account.\n\n Args:\n event_message (object): cloud event message, sample below.\n {\n 'specversion': '1.0.1',\n 'type': 'bc.registry.names.events',\n 'source': '/requests/6724165',\n 'id': id,\n 'time': ,\n 'datacontenttype': 'application/json',\n 'identifier': '781020202',\n 'data': {\n 'request': {\n 'nrNum': 'NR 5659951',\n 'newState': 'APPROVED',\n 'previousState': 'DRAFT'\n }\n }\n }\n "
logger.debug('>>>>>>>process_name_events>>>>>')
request_data = event_message.get('data').get('request')
nr_number = request_data['nrNum']
nr_status = request_data['newState']
nr_entity = EntityModel.find_by_business_identifier(nr_number)
if (nr_entity is None):
logger.info('Entity doesnt exist, creating a new entity.')
nr_entity = EntityModel(business_identifier=nr_number, corp_type_code=CorpType.NR.value)
nr_entity.status = nr_status
nr_entity.name = request_data.get('name', )
nr_entity.last_modified_by = None
nr_entity.last_modified = parser.parse(event_message.get('time'))
if ((nr_status == 'DRAFT') and (AffiliationModel.find_affiliations_by_business_identifier(nr_number) is None)):
logger.info('Status is DRAFT, getting invoices for account')
invoices = RestService.get(f'{APP_CONFIG.PAY_API_URL}/payment-requests?businessIdentifier={nr_number}', token=RestService.get_service_account_token()).json()
if (invoices and (auth_account_id := invoices['invoices'][0].get('paymentAccount').get('accountId')) and str(auth_account_id).isnumeric()):
logger.info('Account ID received : %s', auth_account_id)
org: OrgModel = db.session.query(OrgModel).filter((OrgModel.id == auth_account_id)).one_or_none()
if org:
nr_entity.pass_code_claimed = True
logger.info('Creating affiliation between Entity : %s and Org : %s', nr_entity, org)
affiliation: AffiliationModel = AffiliationModel(entity=nr_entity, org=org)
affiliation.flush()
nr_entity.save()
logger.debug('<<<<<<<process_name_events<<<<<<<<<<')
|
def set_optional_property(destination, source, prop_name):
' Copies the property value if present. '
if (prop_name in source):
destination[prop_name] = source[prop_name]
| -650,095,351,457,198,500
|
Copies the property value if present.
|
dm/templates/external_load_balancer/external_load_balancer.py
|
set_optional_property
|
Dileepbodapati/cloud-foundation-toolkit
|
python
|
def set_optional_property(destination, source, prop_name):
' '
if (prop_name in source):
destination[prop_name] = source[prop_name]
|
def get_backend_service(properties, backend_spec, res_name, project_id):
' Creates the backend service. '
name = backend_spec.get('resourceName', res_name)
backend_name = backend_spec.get('name', name)
backend_properties = {'name': backend_name, 'project': project_id, 'loadBalancingScheme': 'EXTERNAL', 'protocol': get_protocol(properties)}
backend_resource = {'name': name, 'type': 'backend_service.py', 'properties': backend_properties}
optional_properties = ['description', 'backends', 'timeoutSec', 'sessionAffinity', 'connectionDraining', 'backends', 'healthCheck', 'healthChecks', 'portName', 'enableCDN', 'affinityCookieTtlSec']
for prop in optional_properties:
set_optional_property(backend_properties, backend_spec, prop)
return ([backend_resource], [{'name': 'backendServiceName', 'value': backend_name}, {'name': 'backendServiceSelfLink', 'value': '$(ref.{}.selfLink)'.format(name)}])
| -7,261,856,017,208,346,000
|
Creates the backend service.
|
dm/templates/external_load_balancer/external_load_balancer.py
|
get_backend_service
|
Dileepbodapati/cloud-foundation-toolkit
|
python
|
def get_backend_service(properties, backend_spec, res_name, project_id):
' '
name = backend_spec.get('resourceName', res_name)
backend_name = backend_spec.get('name', name)
backend_properties = {'name': backend_name, 'project': project_id, 'loadBalancingScheme': 'EXTERNAL', 'protocol': get_protocol(properties)}
backend_resource = {'name': name, 'type': 'backend_service.py', 'properties': backend_properties}
optional_properties = ['description', 'backends', 'timeoutSec', 'sessionAffinity', 'connectionDraining', 'backends', 'healthCheck', 'healthChecks', 'portName', 'enableCDN', 'affinityCookieTtlSec']
for prop in optional_properties:
set_optional_property(backend_properties, backend_spec, prop)
return ([backend_resource], [{'name': 'backendServiceName', 'value': backend_name}, {'name': 'backendServiceSelfLink', 'value': '$(ref.{}.selfLink)'.format(name)}])
|
def get_forwarding_rule(properties, target, res_name, project_id):
' Creates the forwarding rule. '
name = '{}-forwarding-rule'.format(res_name)
rule_properties = {'name': properties.get('name', res_name), 'project': project_id, 'loadBalancingScheme': 'EXTERNAL', 'target': '$(ref.{}.selfLink)'.format(target['name']), 'IPProtocol': 'TCP'}
rule_resource = {'name': name, 'type': 'forwarding_rule.py', 'properties': rule_properties, 'metadata': {'dependsOn': [target['name']]}}
optional_properties = ['description', 'IPAddress', 'ipVersion', 'portRange']
for prop in optional_properties:
set_optional_property(rule_properties, properties, prop)
return ([rule_resource], [{'name': 'forwardingRuleName', 'value': rule_properties['name']}, {'name': 'forwardingRuleSelfLink', 'value': '$(ref.{}.selfLink)'.format(name)}, {'name': 'IPAddress', 'value': '$(ref.{}.IPAddress)'.format(name)}])
| 8,396,629,647,795,001,000
|
Creates the forwarding rule.
|
dm/templates/external_load_balancer/external_load_balancer.py
|
get_forwarding_rule
|
Dileepbodapati/cloud-foundation-toolkit
|
python
|
def get_forwarding_rule(properties, target, res_name, project_id):
' '
name = '{}-forwarding-rule'.format(res_name)
rule_properties = {'name': properties.get('name', res_name), 'project': project_id, 'loadBalancingScheme': 'EXTERNAL', 'target': '$(ref.{}.selfLink)'.format(target['name']), 'IPProtocol': 'TCP'}
rule_resource = {'name': name, 'type': 'forwarding_rule.py', 'properties': rule_properties, 'metadata': {'dependsOn': [target['name']]}}
optional_properties = ['description', 'IPAddress', 'ipVersion', 'portRange']
for prop in optional_properties:
set_optional_property(rule_properties, properties, prop)
return ([rule_resource], [{'name': 'forwardingRuleName', 'value': rule_properties['name']}, {'name': 'forwardingRuleSelfLink', 'value': '$(ref.{}.selfLink)'.format(name)}, {'name': 'IPAddress', 'value': '$(ref.{}.IPAddress)'.format(name)}])
|
def get_backend_services(properties, res_name, project_id):
' Creates all backend services to be used by the load balancer. '
backend_resources = []
backend_outputs_map = {'backendServiceName': [], 'backendServiceSelfLink': []}
backend_specs = properties['backendServices']
for backend_spec in backend_specs:
backend_res_name = '{}-backend-service-{}'.format(res_name, sha1(json.dumps(backend_spec).encode('utf-8')).hexdigest()[:10])
(resources, outputs) = get_backend_service(properties, backend_spec, backend_res_name, project_id)
backend_resources += resources
for output in outputs:
backend_outputs_map[output['name']].append(output['value'])
backend_outputs = []
for (key, value) in backend_outputs_map.items():
backend_outputs.append({'name': (key + 's'), 'value': value})
return (backend_resources, backend_outputs)
| -5,058,733,043,978,881,000
|
Creates all backend services to be used by the load balancer.
|
dm/templates/external_load_balancer/external_load_balancer.py
|
get_backend_services
|
Dileepbodapati/cloud-foundation-toolkit
|
python
|
def get_backend_services(properties, res_name, project_id):
' '
backend_resources = []
backend_outputs_map = {'backendServiceName': [], 'backendServiceSelfLink': []}
backend_specs = properties['backendServices']
for backend_spec in backend_specs:
backend_res_name = '{}-backend-service-{}'.format(res_name, sha1(json.dumps(backend_spec).encode('utf-8')).hexdigest()[:10])
(resources, outputs) = get_backend_service(properties, backend_spec, backend_res_name, project_id)
backend_resources += resources
for output in outputs:
backend_outputs_map[output['name']].append(output['value'])
backend_outputs = []
for (key, value) in backend_outputs_map.items():
backend_outputs.append({'name': (key + 's'), 'value': value})
return (backend_resources, backend_outputs)
|
def get_ref(name, prop='selfLink'):
' Creates reference to a property of a given resource. '
return '$(ref.{}.{})'.format(name, prop)
| 2,922,289,469,912,184,000
|
Creates reference to a property of a given resource.
|
dm/templates/external_load_balancer/external_load_balancer.py
|
get_ref
|
Dileepbodapati/cloud-foundation-toolkit
|
python
|
def get_ref(name, prop='selfLink'):
' '
return '$(ref.{}.{})'.format(name, prop)
|
def update_refs_recursively(properties):
' Replaces service names with the service selflinks recursively. '
for prop in properties:
value = properties[prop]
if ((prop == 'defaultService') or (prop == 'service')):
is_regular_name = ((not ('.' in value)) and (not ('/' in value)))
if is_regular_name:
properties[prop] = get_ref(value)
elif isinstance(value, dict):
update_refs_recursively(value)
elif isinstance(value, list):
for item in value:
if isinstance(item, dict):
update_refs_recursively(item)
| -6,272,683,023,836,831,000
|
Replaces service names with the service selflinks recursively.
|
dm/templates/external_load_balancer/external_load_balancer.py
|
update_refs_recursively
|
Dileepbodapati/cloud-foundation-toolkit
|
python
|
def update_refs_recursively(properties):
' '
for prop in properties:
value = properties[prop]
if ((prop == 'defaultService') or (prop == 'service')):
is_regular_name = ((not ('.' in value)) and (not ('/' in value)))
if is_regular_name:
properties[prop] = get_ref(value)
elif isinstance(value, dict):
update_refs_recursively(value)
elif isinstance(value, list):
for item in value:
if isinstance(item, dict):
update_refs_recursively(item)
|
def get_url_map(properties, res_name, project_id):
' Creates a UrlMap resource. '
spec = copy.deepcopy(properties)
spec['project'] = project_id
spec['name'] = properties.get('name', res_name)
update_refs_recursively(spec)
resource = {'name': res_name, 'type': 'url_map.py', 'properties': spec}
self_link = '$(ref.{}.selfLink)'.format(res_name)
return (self_link, [resource], [{'name': 'urlMapName', 'value': '$(ref.{}.name)'.format(res_name)}, {'name': 'urlMapSelfLink', 'value': self_link}])
| -2,892,676,959,789,977,000
|
Creates a UrlMap resource.
|
dm/templates/external_load_balancer/external_load_balancer.py
|
get_url_map
|
Dileepbodapati/cloud-foundation-toolkit
|
python
|
def get_url_map(properties, res_name, project_id):
' '
spec = copy.deepcopy(properties)
spec['project'] = project_id
spec['name'] = properties.get('name', res_name)
update_refs_recursively(spec)
resource = {'name': res_name, 'type': 'url_map.py', 'properties': spec}
self_link = '$(ref.{}.selfLink)'.format(res_name)
return (self_link, [resource], [{'name': 'urlMapName', 'value': '$(ref.{}.name)'.format(res_name)}, {'name': 'urlMapSelfLink', 'value': self_link}])
|
def get_target_proxy(properties, res_name, project_id, bs_resources):
' Creates a target proxy resource. '
protocol = get_protocol(properties)
depends = []
if ('HTTP' in protocol):
urlMap = copy.deepcopy(properties['urlMap'])
if (('name' not in urlMap) and ('name' in properties)):
urlMap['name'] = '{}-url-map'.format(properties['name'])
(target, resources, outputs) = get_url_map(urlMap, '{}-url-map'.format(res_name), project_id)
depends.append(resources[0]['name'])
else:
depends.append(bs_resources[0]['name'])
target = get_ref(bs_resources[0]['name'])
resources = []
outputs = []
name = '{}-target'.format(res_name)
proxy = {'name': name, 'type': 'target_proxy.py', 'properties': {'name': '{}-target'.format(properties.get('name', res_name)), 'project': project_id, 'protocol': protocol, 'target': target}, 'metadata': {'dependsOn': [depends]}}
for prop in ['proxyHeader', 'quicOverride']:
set_optional_property(proxy['properties'], properties, prop)
outputs.extend([{'name': 'targetProxyName', 'value': '$(ref.{}.name)'.format(name)}, {'name': 'targetProxySelfLink', 'value': '$(ref.{}.selfLink)'.format(name)}, {'name': 'targetProxyKind', 'value': '$(ref.{}.kind)'.format(name)}])
if ('ssl' in properties):
ssl_spec = properties['ssl']
proxy['properties']['ssl'] = ssl_spec
creates_new_certificate = (not ('url' in ssl_spec['certificate']))
if creates_new_certificate:
outputs.extend([{'name': 'certificateName', 'value': '$(ref.{}.certificateName)'.format(name)}, {'name': 'certificateSelfLink', 'value': '$(ref.{}.certificateSelfLink)'.format(name)}])
return (([proxy] + resources), outputs)
| -7,372,446,502,140,090,000
|
Creates a target proxy resource.
|
dm/templates/external_load_balancer/external_load_balancer.py
|
get_target_proxy
|
Dileepbodapati/cloud-foundation-toolkit
|
python
|
def get_target_proxy(properties, res_name, project_id, bs_resources):
' '
protocol = get_protocol(properties)
depends = []
if ('HTTP' in protocol):
urlMap = copy.deepcopy(properties['urlMap'])
if (('name' not in urlMap) and ('name' in properties)):
urlMap['name'] = '{}-url-map'.format(properties['name'])
(target, resources, outputs) = get_url_map(urlMap, '{}-url-map'.format(res_name), project_id)
depends.append(resources[0]['name'])
else:
depends.append(bs_resources[0]['name'])
target = get_ref(bs_resources[0]['name'])
resources = []
outputs = []
name = '{}-target'.format(res_name)
proxy = {'name': name, 'type': 'target_proxy.py', 'properties': {'name': '{}-target'.format(properties.get('name', res_name)), 'project': project_id, 'protocol': protocol, 'target': target}, 'metadata': {'dependsOn': [depends]}}
for prop in ['proxyHeader', 'quicOverride']:
set_optional_property(proxy['properties'], properties, prop)
outputs.extend([{'name': 'targetProxyName', 'value': '$(ref.{}.name)'.format(name)}, {'name': 'targetProxySelfLink', 'value': '$(ref.{}.selfLink)'.format(name)}, {'name': 'targetProxyKind', 'value': '$(ref.{}.kind)'.format(name)}])
if ('ssl' in properties):
ssl_spec = properties['ssl']
proxy['properties']['ssl'] = ssl_spec
creates_new_certificate = (not ('url' in ssl_spec['certificate']))
if creates_new_certificate:
outputs.extend([{'name': 'certificateName', 'value': '$(ref.{}.certificateName)'.format(name)}, {'name': 'certificateSelfLink', 'value': '$(ref.{}.certificateSelfLink)'.format(name)}])
return (([proxy] + resources), outputs)
|
def get_protocol(properties):
' Finds what network protocol to use. '
is_web = ('urlMap' in properties)
is_secure = ('ssl' in properties)
if is_web:
if is_secure:
return 'HTTPS'
return 'HTTP'
if is_secure:
return 'SSL'
return 'TCP'
| -4,319,831,524,416,239,600
|
Finds what network protocol to use.
|
dm/templates/external_load_balancer/external_load_balancer.py
|
get_protocol
|
Dileepbodapati/cloud-foundation-toolkit
|
python
|
def get_protocol(properties):
' '
is_web = ('urlMap' in properties)
is_secure = ('ssl' in properties)
if is_web:
if is_secure:
return 'HTTPS'
return 'HTTP'
if is_secure:
return 'SSL'
return 'TCP'
|
def generate_config(context):
' Entry point for the deployment resources. '
properties = context.properties
project_id = properties.get('project', context.env['project'])
(bs_resources, bs_outputs) = get_backend_services(properties, context.env['name'], project_id)
(target_resources, target_outputs) = get_target_proxy(properties, context.env['name'], project_id, bs_resources)
(rule_resources, rule_outputs) = get_forwarding_rule(properties, target_resources[0], context.env['name'], project_id)
return {'resources': ((bs_resources + target_resources) + rule_resources), 'outputs': ((bs_outputs + target_outputs) + rule_outputs)}
| -5,912,626,140,741,447,000
|
Entry point for the deployment resources.
|
dm/templates/external_load_balancer/external_load_balancer.py
|
generate_config
|
Dileepbodapati/cloud-foundation-toolkit
|
python
|
def generate_config(context):
' '
properties = context.properties
project_id = properties.get('project', context.env['project'])
(bs_resources, bs_outputs) = get_backend_services(properties, context.env['name'], project_id)
(target_resources, target_outputs) = get_target_proxy(properties, context.env['name'], project_id, bs_resources)
(rule_resources, rule_outputs) = get_forwarding_rule(properties, target_resources[0], context.env['name'], project_id)
return {'resources': ((bs_resources + target_resources) + rule_resources), 'outputs': ((bs_outputs + target_outputs) + rule_outputs)}
|
def get_current_arch():
' Try to get the architecture for the current platform '
if sys.platform.startswith('win'):
machine = platform.machine()
if (machine == 'AMD64'):
return get_arch('x86_64:wincc')
elif (sys.platform in ('linux', 'darwin')):
if (platform.architecture()[0] == '64bit'):
return get_arch('x86_64')
| -3,940,666,364,675,064,000
|
Try to get the architecture for the current platform
|
ppci/arch/__init__.py
|
get_current_arch
|
darleybarreto/ppci-mirror
|
python
|
def get_current_arch():
' '
if sys.platform.startswith('win'):
machine = platform.machine()
if (machine == 'AMD64'):
return get_arch('x86_64:wincc')
elif (sys.platform in ('linux', 'darwin')):
if (platform.architecture()[0] == '64bit'):
return get_arch('x86_64')
|
def get_arch(arch):
"Try to return an architecture instance.\n\n Args:\n arch: can be a string in the form of arch:option1:option2\n\n .. doctest::\n\n >>> from ppci.api import get_arch\n >>> arch = get_arch('msp430')\n >>> arch\n msp430-arch\n >>> type(arch)\n <class 'ppci.arch.msp430.arch.Msp430Arch'>\n "
if isinstance(arch, Architecture):
return arch
elif isinstance(arch, str):
from .target_list import create_arch
if (':' in arch):
parts = arch.split(':')
return create_arch(parts[0], options=tuple(parts[1:]))
else:
return create_arch(arch)
raise ValueError('Invalid architecture {}'.format(arch))
| -2,377,674,193,364,040,700
|
Try to return an architecture instance.
Args:
arch: can be a string in the form of arch:option1:option2
.. doctest::
>>> from ppci.api import get_arch
>>> arch = get_arch('msp430')
>>> arch
msp430-arch
>>> type(arch)
<class 'ppci.arch.msp430.arch.Msp430Arch'>
|
ppci/arch/__init__.py
|
get_arch
|
darleybarreto/ppci-mirror
|
python
|
def get_arch(arch):
"Try to return an architecture instance.\n\n Args:\n arch: can be a string in the form of arch:option1:option2\n\n .. doctest::\n\n >>> from ppci.api import get_arch\n >>> arch = get_arch('msp430')\n >>> arch\n msp430-arch\n >>> type(arch)\n <class 'ppci.arch.msp430.arch.Msp430Arch'>\n "
if isinstance(arch, Architecture):
return arch
elif isinstance(arch, str):
from .target_list import create_arch
if (':' in arch):
parts = arch.split(':')
return create_arch(parts[0], options=tuple(parts[1:]))
else:
return create_arch(arch)
raise ValueError('Invalid architecture {}'.format(arch))
|
def test_ComposedBool(self):
'Test ComposedBool'
pass
| 1,148,415,783,762,149,200
|
Test ComposedBool
|
samples/openapi3/client/petstore/python-experimental/test/test_composed_bool.py
|
test_ComposedBool
|
AndreasA/openapi-generator
|
python
|
def test_ComposedBool(self):
pass
|
def __init__(self, ray_trafo, callback_func=None, callback_func_interval=100, show_pbar=True, torch_manual_seed=10, **kwargs):
'\n Parameters\n ----------\n ray_trafo : `odl.tomo.operators.RayTransform`\n The forward operator\n callback_func : callable, optional\n Callable with signature\n ``callback_func(iteration, reconstruction, loss)`` that is called\n after every `callback_func_interval` iterations, starting\n after the first iteration. It is additionally called after the\n last iteration.\n Note that it differs from the inherited\n `IterativeReconstructor.callback` (which is also supported) in that\n the latter is of type :class:`odl.solvers.util.callback.Callback`,\n which only receives the reconstruction, such that the loss would\n have to be recomputed.\n callback_func_interval : int, optional\n Number of iterations between calls to `callback_func`.\n Default: `100`.\n show_pbar : bool, optional\n Whether to show a tqdm progress bar during reconstruction.\n torch_manual_seed : int, optional\n Fixed seed to set by ``torch.manual_seed`` before reconstruction.\n The default is `10`. It can be set to `None` or `False` to disable\n the manual seed.\n '
super().__init__(reco_space=ray_trafo.domain, observation_space=ray_trafo.range, **kwargs)
self.callback_func = callback_func
self.ray_trafo = ray_trafo
self.ray_trafo_module = OperatorModule(self.ray_trafo)
self.callback_func = callback_func
self.callback_func_interval = callback_func_interval
self.show_pbar = show_pbar
self.torch_manual_seed = torch_manual_seed
| -4,383,685,644,072,515,000
|
Parameters
----------
ray_trafo : `odl.tomo.operators.RayTransform`
The forward operator
callback_func : callable, optional
Callable with signature
``callback_func(iteration, reconstruction, loss)`` that is called
after every `callback_func_interval` iterations, starting
after the first iteration. It is additionally called after the
last iteration.
Note that it differs from the inherited
`IterativeReconstructor.callback` (which is also supported) in that
the latter is of type :class:`odl.solvers.util.callback.Callback`,
which only receives the reconstruction, such that the loss would
have to be recomputed.
callback_func_interval : int, optional
Number of iterations between calls to `callback_func`.
Default: `100`.
show_pbar : bool, optional
Whether to show a tqdm progress bar during reconstruction.
torch_manual_seed : int, optional
Fixed seed to set by ``torch.manual_seed`` before reconstruction.
The default is `10`. It can be set to `None` or `False` to disable
the manual seed.
|
dival/reconstructors/dip_ct_reconstructor.py
|
__init__
|
MBaltz/dival
|
python
|
def __init__(self, ray_trafo, callback_func=None, callback_func_interval=100, show_pbar=True, torch_manual_seed=10, **kwargs):
'\n Parameters\n ----------\n ray_trafo : `odl.tomo.operators.RayTransform`\n The forward operator\n callback_func : callable, optional\n Callable with signature\n ``callback_func(iteration, reconstruction, loss)`` that is called\n after every `callback_func_interval` iterations, starting\n after the first iteration. It is additionally called after the\n last iteration.\n Note that it differs from the inherited\n `IterativeReconstructor.callback` (which is also supported) in that\n the latter is of type :class:`odl.solvers.util.callback.Callback`,\n which only receives the reconstruction, such that the loss would\n have to be recomputed.\n callback_func_interval : int, optional\n Number of iterations between calls to `callback_func`.\n Default: `100`.\n show_pbar : bool, optional\n Whether to show a tqdm progress bar during reconstruction.\n torch_manual_seed : int, optional\n Fixed seed to set by ``torch.manual_seed`` before reconstruction.\n The default is `10`. It can be set to `None` or `False` to disable\n the manual seed.\n '
super().__init__(reco_space=ray_trafo.domain, observation_space=ray_trafo.range, **kwargs)
self.callback_func = callback_func
self.ray_trafo = ray_trafo
self.ray_trafo_module = OperatorModule(self.ray_trafo)
self.callback_func = callback_func
self.callback_func_interval = callback_func_interval
self.show_pbar = show_pbar
self.torch_manual_seed = torch_manual_seed
|
def get_user_details(self, response):
'Return user details from Facebook account'
email = response.get('email')
return {'email': email, 'username': email.split('@')[0]}
| 5,842,184,032,588,614,000
|
Return user details from Facebook account
|
social_core/backends/paipass.py
|
get_user_details
|
everchain-ontech/social-core
|
python
|
def get_user_details(self, response):
email = response.get('email')
return {'email': email, 'username': email.split('@')[0]}
|
def user_data(self, access_token, *args, **kwargs):
'Loads user data from service'
params = self.setting('PROFILE_EXTRA_PARAMS', {})
response = (kwargs.get('response') or {})
params['access_token'] = access_token
headers = {'Authorization': ('%s %s' % (response.get('token_type', 'Bearer').capitalize(), access_token)), 'Accept': 'application/json', 'Content-type': 'application/json;charset=utf-8'}
return self.get_json(self.USER_DATA_URL, params=params, headers=headers)
| 7,723,685,861,222,839,000
|
Loads user data from service
|
social_core/backends/paipass.py
|
user_data
|
everchain-ontech/social-core
|
python
|
def user_data(self, access_token, *args, **kwargs):
params = self.setting('PROFILE_EXTRA_PARAMS', {})
response = (kwargs.get('response') or {})
params['access_token'] = access_token
headers = {'Authorization': ('%s %s' % (response.get('token_type', 'Bearer').capitalize(), access_token)), 'Accept': 'application/json', 'Content-type': 'application/json;charset=utf-8'}
return self.get_json(self.USER_DATA_URL, params=params, headers=headers)
|
def get_redirect_uri(self, state=None):
'Build redirect with redirect_state parameter.'
regex = re.compile('\\:(80|443)\\/')
uri = regex.sub('/', self.redirect_uri)
if (self.REDIRECT_STATE and state):
uri = url_add_parameters(uri, {'redirect_state': state})
return uri
| 3,286,736,078,406,330,400
|
Build redirect with redirect_state parameter.
|
social_core/backends/paipass.py
|
get_redirect_uri
|
everchain-ontech/social-core
|
python
|
def get_redirect_uri(self, state=None):
regex = re.compile('\\:(80|443)\\/')
uri = regex.sub('/', self.redirect_uri)
if (self.REDIRECT_STATE and state):
uri = url_add_parameters(uri, {'redirect_state': state})
return uri
|
@handle_http_errors
def do_auth(self, access_token, *args, **kwargs):
'Finish the auth process once the access_token was retrieved'
data = self.user_data(access_token, *args, **kwargs)
response = (kwargs.get('response') or {})
response.update((data or {}))
if ('access_token' not in response):
response['access_token'] = access_token
kwargs.update({'response': response, 'backend': self})
return self.strategy.authenticate(*args, **kwargs)
| 492,624,721,670,898,100
|
Finish the auth process once the access_token was retrieved
|
social_core/backends/paipass.py
|
do_auth
|
everchain-ontech/social-core
|
python
|
@handle_http_errors
def do_auth(self, access_token, *args, **kwargs):
data = self.user_data(access_token, *args, **kwargs)
response = (kwargs.get('response') or {})
response.update((data or {}))
if ('access_token' not in response):
response['access_token'] = access_token
kwargs.update({'response': response, 'backend': self})
return self.strategy.authenticate(*args, **kwargs)
|
def __init__(self):
'\n Initialization of ConnManager\n '
logging.info('Initializing ConnmanagerMQTT')
self.__connection = UConnMQTT()
self.__message_number = random.randint(0, 65536)
self.__sent_messages = dict()
self.__callback = None
self.__callback_object = None
| -5,099,143,863,538,793,000
|
Initialization of ConnManager
|
utilities/connmanagermqtt.py
|
__init__
|
connax-utim/uhost-micropython
|
python
|
def __init__(self):
'\n \n '
logging.info('Initializing ConnmanagerMQTT')
self.__connection = UConnMQTT()
self.__message_number = random.randint(0, 65536)
self.__sent_messages = dict()
self.__callback = None
self.__callback_object = None
|
def disconnect(self):
'\n Disconnection from server\n '
logging.info('Disconnecting...')
self.__connection.disconnect()
| 4,517,279,725,685,543,000
|
Disconnection from server
|
utilities/connmanagermqtt.py
|
disconnect
|
connax-utim/uhost-micropython
|
python
|
def disconnect(self):
'\n \n '
logging.info('Disconnecting...')
self.__connection.disconnect()
|
def subscribe(self, topic, callback_object, callback):
'\n Subscribe on topic\n\n :param str topic: Topic for subscription\n :param method callback: Callback for received message\n '
logging.info('Subscribing for {0}'.format(topic))
if (not callable(callback)):
raise exceptions.UtimUncallableCallbackError
self.__callback = callback
self.__callback_object = callback_object
self.__connection.subscribe(topic, self, ConnManagerMQTT._on_message)
| 1,995,031,518,274,948,600
|
Subscribe on topic
:param str topic: Topic for subscription
:param method callback: Callback for received message
|
utilities/connmanagermqtt.py
|
subscribe
|
connax-utim/uhost-micropython
|
python
|
def subscribe(self, topic, callback_object, callback):
'\n Subscribe on topic\n\n :param str topic: Topic for subscription\n :param method callback: Callback for received message\n '
logging.info('Subscribing for {0}'.format(topic))
if (not callable(callback)):
raise exceptions.UtimUncallableCallbackError
self.__callback = callback
self.__callback_object = callback_object
self.__connection.subscribe(topic, self, ConnManagerMQTT._on_message)
|
def unsubscribe(self, topic):
'\n Unsubscribe from topic\n\n :param str topic: Topic for subscription cancelling\n '
logging.info('Unsubscribing from {0}'.format(topic))
self.__connection.unsubscribe(topic)
| 6,705,252,433,999,876,000
|
Unsubscribe from topic
:param str topic: Topic for subscription cancelling
|
utilities/connmanagermqtt.py
|
unsubscribe
|
connax-utim/uhost-micropython
|
python
|
def unsubscribe(self, topic):
'\n Unsubscribe from topic\n\n :param str topic: Topic for subscription cancelling\n '
logging.info('Unsubscribing from {0}'.format(topic))
self.__connection.unsubscribe(topic)
|
def publish(self, sender, destination, message):
'\n Publish message\n\n :param sender: Message sender\n :param destination: Message destination\n :param message: The message\n '
id = self.__message_number
self.__message_number = ((self.__message_number + 1) % 65536)
out_message = ((b'\x01' + id.to_bytes(2, 'big')) + message)
logging.info('Publishing {0} to topic {1}'.format(message, destination))
self.__connection.publish(sender, destination, out_message)
self.__sent_messages[id] = {self._SENDER: sender, self._DESTINATION: destination, self._MESSAGE: message}
_thread.start_new_thread(self._republish, (id,))
| 3,824,178,535,247,461,000
|
Publish message
:param sender: Message sender
:param destination: Message destination
:param message: The message
|
utilities/connmanagermqtt.py
|
publish
|
connax-utim/uhost-micropython
|
python
|
def publish(self, sender, destination, message):
'\n Publish message\n\n :param sender: Message sender\n :param destination: Message destination\n :param message: The message\n '
id = self.__message_number
self.__message_number = ((self.__message_number + 1) % 65536)
out_message = ((b'\x01' + id.to_bytes(2, 'big')) + message)
logging.info('Publishing {0} to topic {1}'.format(message, destination))
self.__connection.publish(sender, destination, out_message)
self.__sent_messages[id] = {self._SENDER: sender, self._DESTINATION: destination, self._MESSAGE: message}
_thread.start_new_thread(self._republish, (id,))
|
def _republish(self, id):
'\n Check if message was delivered and republish if not\n\n :param id: Message ID\n '
logging.info('_publish for {0} started'.format(id))
time.sleep(10)
while (id in self.__sent_messages.keys()):
try:
logging.info("Message {0} wasn't delivered".format(id))
message = self.__sent_messages[id]
self.__connection.publish(message[self._SENDER], message[self._DESTINATION], ((b'\x01' + id.to_bytes(2, 'big')) + message[self._MESSAGE]))
time.sleep(5)
except KeyError:
logging.error('Message was already deleted from republish')
break
logging.info('Message {0} was delivered'.format(id))
| -3,382,346,000,680,288,000
|
Check if message was delivered and republish if not
:param id: Message ID
|
utilities/connmanagermqtt.py
|
_republish
|
connax-utim/uhost-micropython
|
python
|
def _republish(self, id):
'\n Check if message was delivered and republish if not\n\n :param id: Message ID\n '
logging.info('_publish for {0} started'.format(id))
time.sleep(10)
while (id in self.__sent_messages.keys()):
try:
logging.info("Message {0} wasn't delivered".format(id))
message = self.__sent_messages[id]
self.__connection.publish(message[self._SENDER], message[self._DESTINATION], ((b'\x01' + id.to_bytes(2, 'big')) + message[self._MESSAGE]))
time.sleep(5)
except KeyError:
logging.error('Message was already deleted from republish')
break
logging.info('Message {0} was delivered'.format(id))
|
def _on_message(self, sender, message):
'\n Message receiving callback\n\n :param sender: Message sender\n :param message: The message\n '
logging.info('Received message {0} from {1}'.format(message, sender))
if (len(message) < 3):
logging.info('Message is too short to be something!')
elif (message[:1] == b'\x02'):
try:
logging.info('Received ack, deleting message from sent')
id = int.from_bytes(message[1:3], 'big')
if (id in self.__sent_messages.keys()):
self.__sent_messages.pop(id)
except KeyError:
logging.error('Message was already deleted from republish')
else:
logging.info('Received message, sending ack...')
ack_message = (b'\x02' + message[1:3])
self.__connection.publish(b'ack', sender.decode(), ack_message)
self.__callback(self.__callback_object, sender, message[3:])
| -3,529,581,953,659,937,300
|
Message receiving callback
:param sender: Message sender
:param message: The message
|
utilities/connmanagermqtt.py
|
_on_message
|
connax-utim/uhost-micropython
|
python
|
def _on_message(self, sender, message):
'\n Message receiving callback\n\n :param sender: Message sender\n :param message: The message\n '
logging.info('Received message {0} from {1}'.format(message, sender))
if (len(message) < 3):
logging.info('Message is too short to be something!')
elif (message[:1] == b'\x02'):
try:
logging.info('Received ack, deleting message from sent')
id = int.from_bytes(message[1:3], 'big')
if (id in self.__sent_messages.keys()):
self.__sent_messages.pop(id)
except KeyError:
logging.error('Message was already deleted from republish')
else:
logging.info('Received message, sending ack...')
ack_message = (b'\x02' + message[1:3])
self.__connection.publish(b'ack', sender.decode(), ack_message)
self.__callback(self.__callback_object, sender, message[3:])
|
@click.command()
def main():
'Update SMARTER database statuses'
logger.info(f'{Path(__file__).name} started')
try:
database = SmarterInfo.objects.get(id='smarter')
logger.debug(f'Found: {database}')
except DoesNotExist:
logger.warning('Smarter database status was never tracked')
database = SmarterInfo(id='smarter')
database.version = __version__
database.working_assemblies = WORKING_ASSEMBLIES
database.plink_specie_opt = PLINK_SPECIES_OPT
database.last_updated = datetime.datetime.now()
database.save()
logger.info('Database status updated')
logger.info(f'{Path(__file__).name} ended')
| 3,191,944,132,278,295,000
|
Update SMARTER database statuses
|
src/data/update_db_status.py
|
main
|
cnr-ibba/SMARTER-database
|
python
|
@click.command()
def main():
logger.info(f'{Path(__file__).name} started')
try:
database = SmarterInfo.objects.get(id='smarter')
logger.debug(f'Found: {database}')
except DoesNotExist:
logger.warning('Smarter database status was never tracked')
database = SmarterInfo(id='smarter')
database.version = __version__
database.working_assemblies = WORKING_ASSEMBLIES
database.plink_specie_opt = PLINK_SPECIES_OPT
database.last_updated = datetime.datetime.now()
database.save()
logger.info('Database status updated')
logger.info(f'{Path(__file__).name} ended')
|
def __init__(self, generate_callback, update_callback, remove_callback, coordinates, filter_radius=None, filter_minimum_magnitude=None):
'Initialize the IGN Sismología Feed Manager.'
feed = IgnSismologiaFeed(coordinates, filter_radius=filter_radius, filter_minimum_magnitude=filter_minimum_magnitude)
super().__init__(feed, generate_callback, update_callback, remove_callback)
| 4,804,089,377,989,919,000
|
Initialize the IGN Sismología Feed Manager.
|
georss_ign_sismologia_client/__init__.py
|
__init__
|
exxamalte/python-georss-ign-sismologia-client
|
python
|
def __init__(self, generate_callback, update_callback, remove_callback, coordinates, filter_radius=None, filter_minimum_magnitude=None):
feed = IgnSismologiaFeed(coordinates, filter_radius=filter_radius, filter_minimum_magnitude=filter_minimum_magnitude)
super().__init__(feed, generate_callback, update_callback, remove_callback)
|
def __init__(self, home_coordinates, filter_radius=None, filter_minimum_magnitude=None):
'Initialise this service.'
super().__init__(home_coordinates, URL, filter_radius=filter_radius)
self._filter_minimum_magnitude = filter_minimum_magnitude
| 4,457,051,066,076,739,600
|
Initialise this service.
|
georss_ign_sismologia_client/__init__.py
|
__init__
|
exxamalte/python-georss-ign-sismologia-client
|
python
|
def __init__(self, home_coordinates, filter_radius=None, filter_minimum_magnitude=None):
super().__init__(home_coordinates, URL, filter_radius=filter_radius)
self._filter_minimum_magnitude = filter_minimum_magnitude
|
def __repr__(self):
'Return string representation of this feed.'
return '<{}(home={}, url={}, radius={}, magnitude={})>'.format(self.__class__.__name__, self._home_coordinates, self._url, self._filter_radius, self._filter_minimum_magnitude)
| 6,123,197,691,639,559,000
|
Return string representation of this feed.
|
georss_ign_sismologia_client/__init__.py
|
__repr__
|
exxamalte/python-georss-ign-sismologia-client
|
python
|
def __repr__(self):
return '<{}(home={}, url={}, radius={}, magnitude={})>'.format(self.__class__.__name__, self._home_coordinates, self._url, self._filter_radius, self._filter_minimum_magnitude)
|
def _new_entry(self, home_coordinates, rss_entry, global_data):
'Generate a new entry.'
return IgnSismologiaFeedEntry(home_coordinates, rss_entry)
| 4,687,324,556,794,112,000
|
Generate a new entry.
|
georss_ign_sismologia_client/__init__.py
|
_new_entry
|
exxamalte/python-georss-ign-sismologia-client
|
python
|
def _new_entry(self, home_coordinates, rss_entry, global_data):
return IgnSismologiaFeedEntry(home_coordinates, rss_entry)
|
def _filter_entries(self, entries):
'Filter the provided entries.'
entries = super()._filter_entries(entries)
if self._filter_minimum_magnitude:
return list(filter((lambda entry: (entry.magnitude and (entry.magnitude >= self._filter_minimum_magnitude))), entries))
return entries
| -890,736,604,661,526,400
|
Filter the provided entries.
|
georss_ign_sismologia_client/__init__.py
|
_filter_entries
|
exxamalte/python-georss-ign-sismologia-client
|
python
|
def _filter_entries(self, entries):
entries = super()._filter_entries(entries)
if self._filter_minimum_magnitude:
return list(filter((lambda entry: (entry.magnitude and (entry.magnitude >= self._filter_minimum_magnitude))), entries))
return entries
|
def __init__(self, home_coordinates, rss_entry):
'Initialise this service.'
super().__init__(home_coordinates, rss_entry)
| 7,812,088,943,368,219,000
|
Initialise this service.
|
georss_ign_sismologia_client/__init__.py
|
__init__
|
exxamalte/python-georss-ign-sismologia-client
|
python
|
def __init__(self, home_coordinates, rss_entry):
super().__init__(home_coordinates, rss_entry)
|
@property
def attribution(self) -> str:
'Return the attribution of this entry.'
return ATTRIBUTION
| 6,532,099,834,685,233,000
|
Return the attribution of this entry.
|
georss_ign_sismologia_client/__init__.py
|
attribution
|
exxamalte/python-georss-ign-sismologia-client
|
python
|
@property
def attribution(self) -> str:
return ATTRIBUTION
|
@property
def published(self) -> Optional[datetime]:
'Return the published date of this entry.'
published_date = self._search_in_title(REGEXP_ATTR_PUBLISHED_DATE)
if published_date:
published_date = dateparser.parse(published_date)
return published_date
| 1,931,088,764,418,862,300
|
Return the published date of this entry.
|
georss_ign_sismologia_client/__init__.py
|
published
|
exxamalte/python-georss-ign-sismologia-client
|
python
|
@property
def published(self) -> Optional[datetime]:
published_date = self._search_in_title(REGEXP_ATTR_PUBLISHED_DATE)
if published_date:
published_date = dateparser.parse(published_date)
return published_date
|
@property
def magnitude(self) -> Optional[float]:
'Return the magnitude of this entry.'
magnitude = self._search_in_description(REGEXP_ATTR_MAGNITUDE)
if magnitude:
magnitude = float(magnitude)
return magnitude
| -2,808,530,826,421,641,700
|
Return the magnitude of this entry.
|
georss_ign_sismologia_client/__init__.py
|
magnitude
|
exxamalte/python-georss-ign-sismologia-client
|
python
|
@property
def magnitude(self) -> Optional[float]:
magnitude = self._search_in_description(REGEXP_ATTR_MAGNITUDE)
if magnitude:
magnitude = float(magnitude)
return magnitude
|
@property
def region(self) -> Optional[float]:
'Return the region of this entry.'
return self._search_in_description(REGEXP_ATTR_REGION)
| 8,086,530,032,019,997,000
|
Return the region of this entry.
|
georss_ign_sismologia_client/__init__.py
|
region
|
exxamalte/python-georss-ign-sismologia-client
|
python
|
@property
def region(self) -> Optional[float]:
return self._search_in_description(REGEXP_ATTR_REGION)
|
def _short_id(self) -> Optional[str]:
'Return the short id of this entry.'
return self._search_in_external_id(REGEXP_ATTR_SHORT_ID)
| -8,168,063,447,618,311,000
|
Return the short id of this entry.
|
georss_ign_sismologia_client/__init__.py
|
_short_id
|
exxamalte/python-georss-ign-sismologia-client
|
python
|
def _short_id(self) -> Optional[str]:
return self._search_in_external_id(REGEXP_ATTR_SHORT_ID)
|
@property
def image_url(self) -> Optional[str]:
'Return the image url of this entry.'
short_id = self._short_id()
if short_id:
return IMAGE_URL_PATTERN.format(short_id)
return None
| -2,840,390,393,779,878,400
|
Return the image url of this entry.
|
georss_ign_sismologia_client/__init__.py
|
image_url
|
exxamalte/python-georss-ign-sismologia-client
|
python
|
@property
def image_url(self) -> Optional[str]:
short_id = self._short_id()
if short_id:
return IMAGE_URL_PATTERN.format(short_id)
return None
|
@click.group()
def cli():
'\n The tethys CLI for managing your environment.\n '
| -2,337,295,908,500,185,600
|
The tethys CLI for managing your environment.
|
tethys/bin/cli.py
|
cli
|
tethys-platform/tethys
|
python
|
@click.group()
def cli():
'\n \n '
|
@cli.group(name='apps')
def apps_entry():
'\n Tethys apps manager\n '
| 3,940,248,023,208,902,000
|
Tethys apps manager
|
tethys/bin/cli.py
|
apps_entry
|
tethys-platform/tethys
|
python
|
@cli.group(name='apps')
def apps_entry():
'\n \n '
|
def user_cache_dir(appname):
'\n Return full path to the user-specific cache dir for this application.\n\n "appname" is the name of application.\n\n Typical user cache directories are:\n Mac OS X: ~/Library/Caches/<AppName>\n Unix: ~/.cache/<AppName> (XDG default)\n Windows: C:\\Users\\<username>\\AppData\\Local\\<AppName>\\Cache\n\n On Windows the only suggestion in the MSDN docs is that local settings go\n in the `CSIDL_LOCAL_APPDATA` directory. This is identical to the\n non-roaming app data dir (the default returned by `user_data_dir`). Apps\n typically put cache data somewhere *under* the given dir here. Some\n examples:\n ...\\Mozilla\\Firefox\\Profiles\\<ProfileName>\\Cache\n ...\\Acme\\SuperApp\\Cache\\1.0\n\n OPINION: This function appends "Cache" to the `CSIDL_LOCAL_APPDATA` value.\n '
if (sys.platform == 'win32'):
path = os.path.normpath(_get_win_folder('CSIDL_LOCAL_APPDATA'))
path = os.path.join(path, appname, 'Cache')
elif (sys.platform == 'darwin'):
path = os.path.expanduser('~/Library/Caches')
path = os.path.join(path, appname)
else:
path = os.getenv('XDG_CACHE_HOME', os.path.expanduser('~/.cache'))
path = os.path.join(path, appname)
return path
| 8,495,666,354,142,405,000
|
Return full path to the user-specific cache dir for this application.
"appname" is the name of application.
Typical user cache directories are:
Mac OS X: ~/Library/Caches/<AppName>
Unix: ~/.cache/<AppName> (XDG default)
Windows: C:\Users\<username>\AppData\Local\<AppName>\Cache
On Windows the only suggestion in the MSDN docs is that local settings go
in the `CSIDL_LOCAL_APPDATA` directory. This is identical to the
non-roaming app data dir (the default returned by `user_data_dir`). Apps
typically put cache data somewhere *under* the given dir here. Some
examples:
...\Mozilla\Firefox\Profiles\<ProfileName>\Cache
...\Acme\SuperApp\Cache\1.0
OPINION: This function appends "Cache" to the `CSIDL_LOCAL_APPDATA` value.
|
pip/appdirs.py
|
user_cache_dir
|
chadrik/pip
|
python
|
def user_cache_dir(appname):
'\n Return full path to the user-specific cache dir for this application.\n\n "appname" is the name of application.\n\n Typical user cache directories are:\n Mac OS X: ~/Library/Caches/<AppName>\n Unix: ~/.cache/<AppName> (XDG default)\n Windows: C:\\Users\\<username>\\AppData\\Local\\<AppName>\\Cache\n\n On Windows the only suggestion in the MSDN docs is that local settings go\n in the `CSIDL_LOCAL_APPDATA` directory. This is identical to the\n non-roaming app data dir (the default returned by `user_data_dir`). Apps\n typically put cache data somewhere *under* the given dir here. Some\n examples:\n ...\\Mozilla\\Firefox\\Profiles\\<ProfileName>\\Cache\n ...\\Acme\\SuperApp\\Cache\\1.0\n\n OPINION: This function appends "Cache" to the `CSIDL_LOCAL_APPDATA` value.\n '
if (sys.platform == 'win32'):
path = os.path.normpath(_get_win_folder('CSIDL_LOCAL_APPDATA'))
path = os.path.join(path, appname, 'Cache')
elif (sys.platform == 'darwin'):
path = os.path.expanduser('~/Library/Caches')
path = os.path.join(path, appname)
else:
path = os.getenv('XDG_CACHE_HOME', os.path.expanduser('~/.cache'))
path = os.path.join(path, appname)
return path
|
def _get_win_folder_from_registry(csidl_name):
"\n This is a fallback technique at best. I'm not sure if using the\n registry for this guarantees us the correct answer for all CSIDL_*\n names.\n "
import _winreg
shell_folder_name = {'CSIDL_APPDATA': 'AppData', 'CSIDL_COMMON_APPDATA': 'Common AppData', 'CSIDL_LOCAL_APPDATA': 'Local AppData'}[csidl_name]
key = _winreg.OpenKey(_winreg.HKEY_CURRENT_USER, 'Software\\Microsoft\\Windows\\CurrentVersion\\Explorer\\Shell Folders')
(directory, _type) = _winreg.QueryValueEx(key, shell_folder_name)
return directory
| -5,892,240,489,297,082,000
|
This is a fallback technique at best. I'm not sure if using the
registry for this guarantees us the correct answer for all CSIDL_*
names.
|
pip/appdirs.py
|
_get_win_folder_from_registry
|
chadrik/pip
|
python
|
def _get_win_folder_from_registry(csidl_name):
"\n This is a fallback technique at best. I'm not sure if using the\n registry for this guarantees us the correct answer for all CSIDL_*\n names.\n "
import _winreg
shell_folder_name = {'CSIDL_APPDATA': 'AppData', 'CSIDL_COMMON_APPDATA': 'Common AppData', 'CSIDL_LOCAL_APPDATA': 'Local AppData'}[csidl_name]
key = _winreg.OpenKey(_winreg.HKEY_CURRENT_USER, 'Software\\Microsoft\\Windows\\CurrentVersion\\Explorer\\Shell Folders')
(directory, _type) = _winreg.QueryValueEx(key, shell_folder_name)
return directory
|
def compress(input_file_path, output_file_path, level=0, method=1):
'Function to compress PDF via Ghostscript command line interface'
quality = {0: '/default', 1: '/prepress', 2: '/printer', 3: '/ebook', 4: '/screen'}
if (not os.path.isfile(input_file_path)):
print(f'Error: invalid path for input file: {input_file_path}')
sys.exit(1)
if (input_file_path.split('.')[(- 1)].lower() != 'pdf'):
print(f'Error: input file is not a PDF: {input_file_path}')
sys.exit(1)
gs = get_ghostscript_path()
file_name = input_file_path.split('/')[(- 1)]
print('Compressing PDF "{}"...'.format(file_name))
if (method == 1):
cmd = [gs, '-sDEVICE=pdfwrite', '-dNumRenderingThreads=2', '-dPDFSETTINGS={}'.format(quality[level]), '-dCompatibilityLevel=1.5', '-dNOPAUSE', '-dQUIET', '-dBATCH', '-dSAFER', '-dSubsetFonts=true', '-dCompressFonts=true', '-dEmbedAllFonts=true', '-sProcessColorModel=DeviceRGB', '-sColorConversionStrategy=RGB', '-sColorConversionStrategyForImages=RGB', '-dConvertCMYKImagesToRGB=true', '-dDetectDuplicateImages=true', '-dColorImageDownsampleType=/Bicubic', '-dColorImageResolution=300', '-dGrayImageDownsampleType=/Bicubic', '-dGrayImageResolution=300', '-dMonoImageDownsampleType=/Subsample', '-dMonoImageResolution=300', '-dDownsampleColorImages=true', '-dDoThumbnails=false', '-dCreateJobTicket=false', '-dPreserveEPSInfo=false', '-dPreserveOPIComments=false', '-dPreserveOverprintSettings=false', '-dUCRandBGInfo=/Remove', '-sOutputFile={}'.format(output_file_path), input_file_path]
elif (method == 2):
cmd = [gs, '-sDEVICE=pdfwrite', '-dNumRenderingThreads=2', '-dPDFSETTINGS={}'.format(quality[level]), '-dCompatibilityLevel=1.4', '-dNOPAUSE', '-dQUIET', '-dBATCH', '-dSAFER', '-dDetectDuplicateImages=true', '-sOutputFile={}'.format(output_file_path), input_file_path]
try:
subprocess.call(cmd, stderr=sys.stdout)
except:
print(' '.join(cmd))
if (not os.path.exists(output_file_path)):
raise Exception(f'''Ghostscript failed to create {output_file_path}, time to debug...
''', ' '.join(cmd))
initial_size = round((os.path.getsize(input_file_path) / (1024 * 1024)), 2)
final_size = round((os.path.getsize(output_file_path) / (1024 * 1024)), 2)
ratio = round((100 - ((final_size / initial_size) * 100)), 1)
print(f'Initial file size is {initial_size}MB', f'; Final file size is {final_size}MB', f'''; Compression Ratio = {ratio}%
''')
if ((final_size > initial_size) and (method == 1)):
print(('-' * 100))
print('Compression Failed\nTrying another ghostscript compression method...')
print(('-' * 100))
info = compress(input_file_path, output_file_path, 4, 2)
initial_size = info[0]
final_size = info[1]
ratio = info[2]
return [initial_size, final_size, ratio]
| 978,765,344,947,813,200
|
Function to compress PDF via Ghostscript command line interface
|
gs_compress.py
|
compress
|
brio50/groups-io
|
python
|
def compress(input_file_path, output_file_path, level=0, method=1):
    """Compress a PDF file via the Ghostscript command-line interface.

    Args:
        input_file_path: Path to the source PDF. Must exist and end in '.pdf'.
        output_file_path: Path where the compressed PDF is written.
        level: Quality preset index: 0=/default, 1=/prepress, 2=/printer,
            3=/ebook, 4=/screen.
        method: 1 uses an aggressive flag set (font subsetting, RGB color
            conversion, image downsampling); 2 uses a minimal flag set.
            If method 1 produces a *larger* file, the function retries once
            with method 2 at level 4 and reports that result instead.

    Returns:
        list: [initial_size_mb, final_size_mb, compression_ratio_percent].

    Raises:
        Exception: If Ghostscript did not produce the output file.
        ValueError: If ``method`` is not 1 or 2.
    """
    quality = {0: '/default', 1: '/prepress', 2: '/printer', 3: '/ebook', 4: '/screen'}
    if not os.path.isfile(input_file_path):
        print(f'Error: invalid path for input file: {input_file_path}')
        sys.exit(1)
    if input_file_path.split('.')[-1].lower() != 'pdf':
        print(f'Error: input file is not a PDF: {input_file_path}')
        sys.exit(1)
    gs = get_ghostscript_path()
    file_name = input_file_path.split('/')[-1]
    print('Compressing PDF "{}"...'.format(file_name))
    if method == 1:
        cmd = [gs, '-sDEVICE=pdfwrite', '-dNumRenderingThreads=2', '-dPDFSETTINGS={}'.format(quality[level]), '-dCompatibilityLevel=1.5', '-dNOPAUSE', '-dQUIET', '-dBATCH', '-dSAFER', '-dSubsetFonts=true', '-dCompressFonts=true', '-dEmbedAllFonts=true', '-sProcessColorModel=DeviceRGB', '-sColorConversionStrategy=RGB', '-sColorConversionStrategyForImages=RGB', '-dConvertCMYKImagesToRGB=true', '-dDetectDuplicateImages=true', '-dColorImageDownsampleType=/Bicubic', '-dColorImageResolution=300', '-dGrayImageDownsampleType=/Bicubic', '-dGrayImageResolution=300', '-dMonoImageDownsampleType=/Subsample', '-dMonoImageResolution=300', '-dDownsampleColorImages=true', '-dDoThumbnails=false', '-dCreateJobTicket=false', '-dPreserveEPSInfo=false', '-dPreserveOPIComments=false', '-dPreserveOverprintSettings=false', '-dUCRandBGInfo=/Remove', '-sOutputFile={}'.format(output_file_path), input_file_path]
    elif method == 2:
        cmd = [gs, '-sDEVICE=pdfwrite', '-dNumRenderingThreads=2', '-dPDFSETTINGS={}'.format(quality[level]), '-dCompatibilityLevel=1.4', '-dNOPAUSE', '-dQUIET', '-dBATCH', '-dSAFER', '-dDetectDuplicateImages=true', '-sOutputFile={}'.format(output_file_path), input_file_path]
    else:
        # Bug fix: an unrecognized method previously left `cmd` unbound,
        # producing a confusing NameError at the subprocess call below.
        raise ValueError(f'Unknown compression method: {method}')
    try:
        subprocess.call(cmd, stderr=sys.stdout)
    except OSError:
        # Bug fix: was a bare `except:` that also swallowed SystemExit and
        # KeyboardInterrupt; only launch failures are expected here.
        print(' '.join(cmd))
    if not os.path.exists(output_file_path):
        raise Exception(f'Ghostscript failed to create {output_file_path}, time to debug...\n', ' '.join(cmd))
    # Sizes in MB, rounded for the report below.
    initial_size = round(os.path.getsize(input_file_path) / (1024 * 1024), 2)
    final_size = round(os.path.getsize(output_file_path) / (1024 * 1024), 2)
    ratio = round(100 - (final_size / initial_size) * 100, 1)
    print(f'Initial file size is {initial_size}MB', f'; Final file size is {final_size}MB', f'; Compression Ratio = {ratio}%\n')
    if (final_size > initial_size) and (method == 1):
        # Aggressive flags backfired: retry once with the minimal flag set.
        print('-' * 100)
        print('Compression Failed\nTrying another ghostscript compression method...')
        print('-' * 100)
        info = compress(input_file_path, output_file_path, 4, 2)
        initial_size = info[0]
        final_size = info[1]
        ratio = info[2]
    return [initial_size, final_size, ratio]
|
def get_transport_class(cls, label: str=None) -> Type[UserEventServiceTransport]:
    """Pick a transport class from the registry.

    Args:
        label: Name of the desired transport. When falsy (None or empty),
            the first transport in the registry is returned instead.

    Returns:
        The transport class to use.
    """
    if not label:
        # No explicit choice: fall back to the first registered transport.
        return next(iter(cls._transport_registry.values()))
    return cls._transport_registry[label]
| -8,486,663,818,495,446,000
|
Returns an appropriate transport class.
Args:
label: The name of the desired transport. If none is
provided, then the first transport in the registry is used.
Returns:
The transport class to use.
|
google/cloud/recommendationengine/v1beta1/recommendationengine-v1beta1-py/google/cloud/recommendationengine_v1beta1/services/user_event_service/client.py
|
get_transport_class
|
googleapis/googleapis-gen
|
python
|
def get_transport_class(cls, label: str=None) -> Type[UserEventServiceTransport]:
'Returns an appropriate transport class.\n\n Args:\n label: The name of the desired transport. If none is\n provided, then the first transport in the registry is used.\n\n Returns:\n The transport class to use.\n '
if label:
return cls._transport_registry[label]
return next(iter(cls._transport_registry.values()))
|
@staticmethod
def _get_default_mtls_endpoint(api_endpoint):
'Converts api endpoint to mTLS endpoint.\n\n Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to\n "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.\n Args:\n api_endpoint (Optional[str]): the api endpoint to convert.\n Returns:\n str: converted mTLS api endpoint.\n '
if (not api_endpoint):
return api_endpoint
mtls_endpoint_re = re.compile('(?P<name>[^.]+)(?P<mtls>\\.mtls)?(?P<sandbox>\\.sandbox)?(?P<googledomain>\\.googleapis\\.com)?')
m = mtls_endpoint_re.match(api_endpoint)
(name, mtls, sandbox, googledomain) = m.groups()
if (mtls or (not googledomain)):
return api_endpoint
if sandbox:
return api_endpoint.replace('sandbox.googleapis.com', 'mtls.sandbox.googleapis.com')
return api_endpoint.replace('.googleapis.com', '.mtls.googleapis.com')
| 7,533,698,565,164,944,000
|
Converts api endpoint to mTLS endpoint.
Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
"*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
Args:
api_endpoint (Optional[str]): the api endpoint to convert.
Returns:
str: converted mTLS api endpoint.
|
google/cloud/recommendationengine/v1beta1/recommendationengine-v1beta1-py/google/cloud/recommendationengine_v1beta1/services/user_event_service/client.py
|
_get_default_mtls_endpoint
|
googleapis/googleapis-gen
|
python
|
@staticmethod
def _get_default_mtls_endpoint(api_endpoint):
'Converts api endpoint to mTLS endpoint.\n\n Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to\n "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.\n Args:\n api_endpoint (Optional[str]): the api endpoint to convert.\n Returns:\n str: converted mTLS api endpoint.\n '
if (not api_endpoint):
return api_endpoint
mtls_endpoint_re = re.compile('(?P<name>[^.]+)(?P<mtls>\\.mtls)?(?P<sandbox>\\.sandbox)?(?P<googledomain>\\.googleapis\\.com)?')
m = mtls_endpoint_re.match(api_endpoint)
(name, mtls, sandbox, googledomain) = m.groups()
if (mtls or (not googledomain)):
return api_endpoint
if sandbox:
return api_endpoint.replace('sandbox.googleapis.com', 'mtls.sandbox.googleapis.com')
return api_endpoint.replace('.googleapis.com', '.mtls.googleapis.com')
|
@classmethod
def from_service_account_info(cls, info: dict, *args, **kwargs):
    """Create an instance of this client from in-memory credentials info.

    Args:
        info (dict): The service account private key info.
        args: Additional positional arguments forwarded to the constructor.
        kwargs: Additional keyword arguments forwarded to the constructor.

    Returns:
        UserEventServiceClient: The constructed client.
    """
    # Build credentials from the dict and inject them (overriding any
    # `credentials` the caller may have passed in kwargs).
    kwargs['credentials'] = service_account.Credentials.from_service_account_info(info)
    return cls(*args, **kwargs)
| -8,069,945,373,802,076,000
|
Creates an instance of this client using the provided credentials
info.
Args:
info (dict): The service account private key info.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
UserEventServiceClient: The constructed client.
|
google/cloud/recommendationengine/v1beta1/recommendationengine-v1beta1-py/google/cloud/recommendationengine_v1beta1/services/user_event_service/client.py
|
from_service_account_info
|
googleapis/googleapis-gen
|
python
|
@classmethod
def from_service_account_info(cls, info: dict, *args, **kwargs):
'Creates an instance of this client using the provided credentials\n info.\n\n Args:\n info (dict): The service account private key info.\n args: Additional arguments to pass to the constructor.\n kwargs: Additional arguments to pass to the constructor.\n\n Returns:\n UserEventServiceClient: The constructed client.\n '
credentials = service_account.Credentials.from_service_account_info(info)
kwargs['credentials'] = credentials
return cls(*args, **kwargs)
|
@classmethod
def from_service_account_file(cls, filename: str, *args, **kwargs):
    """Create an instance of this client from a credentials file.

    Args:
        filename (str): Path to the service account private key JSON file.
        args: Additional positional arguments forwarded to the constructor.
        kwargs: Additional keyword arguments forwarded to the constructor.

    Returns:
        UserEventServiceClient: The constructed client.
    """
    # Load credentials from disk and inject them (overriding any
    # `credentials` the caller may have passed in kwargs).
    kwargs['credentials'] = service_account.Credentials.from_service_account_file(filename)
    return cls(*args, **kwargs)
| 7,707,715,727,642,763,000
|
Creates an instance of this client using the provided credentials
file.
Args:
filename (str): The path to the service account private key json
file.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
UserEventServiceClient: The constructed client.
|
google/cloud/recommendationengine/v1beta1/recommendationengine-v1beta1-py/google/cloud/recommendationengine_v1beta1/services/user_event_service/client.py
|
from_service_account_file
|
googleapis/googleapis-gen
|
python
|
@classmethod
def from_service_account_file(cls, filename: str, *args, **kwargs):
'Creates an instance of this client using the provided credentials\n file.\n\n Args:\n filename (str): The path to the service account private key json\n file.\n args: Additional arguments to pass to the constructor.\n kwargs: Additional arguments to pass to the constructor.\n\n Returns:\n UserEventServiceClient: The constructed client.\n '
credentials = service_account.Credentials.from_service_account_file(filename)
kwargs['credentials'] = credentials
return cls(*args, **kwargs)
|
@property
def transport(self) -> UserEventServiceTransport:
    """Return the transport used by the client instance.

    Returns:
        UserEventServiceTransport: The transport used by the client
        instance.
    """
    # The transport is created in __init__ (either supplied directly or
    # built from client options) and is read-only thereafter.
    return self._transport
| -8,178,801,226,573,250,000
|
Returns the transport used by the client instance.
Returns:
UserEventServiceTransport: The transport used by the client
instance.
|
google/cloud/recommendationengine/v1beta1/recommendationengine-v1beta1-py/google/cloud/recommendationengine_v1beta1/services/user_event_service/client.py
|
transport
|
googleapis/googleapis-gen
|
python
|
@property
def transport(self) -> UserEventServiceTransport:
'Returns the transport used by the client instance.\n\n Returns:\n UserEventServiceTransport: The transport used by the client\n instance.\n '
return self._transport
|
@staticmethod
def event_store_path(project: str, location: str, catalog: str, event_store: str) -> str:
'Returns a fully-qualified event_store string.'
return 'projects/{project}/locations/{location}/catalogs/{catalog}/eventStores/{event_store}'.format(project=project, location=location, catalog=catalog, event_store=event_store)
| -2,980,082,045,436,697,000
|
Returns a fully-qualified event_store string.
|
google/cloud/recommendationengine/v1beta1/recommendationengine-v1beta1-py/google/cloud/recommendationengine_v1beta1/services/user_event_service/client.py
|
event_store_path
|
googleapis/googleapis-gen
|
python
|
@staticmethod
def event_store_path(project: str, location: str, catalog: str, event_store: str) -> str:
return 'projects/{project}/locations/{location}/catalogs/{catalog}/eventStores/{event_store}'.format(project=project, location=location, catalog=catalog, event_store=event_store)
|
@staticmethod
def parse_event_store_path(path: str) -> Dict[(str, str)]:
'Parses a event_store path into its component segments.'
m = re.match('^projects/(?P<project>.+?)/locations/(?P<location>.+?)/catalogs/(?P<catalog>.+?)/eventStores/(?P<event_store>.+?)$', path)
return (m.groupdict() if m else {})
| 8,316,049,900,430,091,000
|
Parses a event_store path into its component segments.
|
google/cloud/recommendationengine/v1beta1/recommendationengine-v1beta1-py/google/cloud/recommendationengine_v1beta1/services/user_event_service/client.py
|
parse_event_store_path
|
googleapis/googleapis-gen
|
python
|
@staticmethod
def parse_event_store_path(path: str) -> Dict[(str, str)]:
m = re.match('^projects/(?P<project>.+?)/locations/(?P<location>.+?)/catalogs/(?P<catalog>.+?)/eventStores/(?P<event_store>.+?)$', path)
return (m.groupdict() if m else {})
|
@staticmethod
def common_billing_account_path(billing_account: str) -> str:
'Returns a fully-qualified billing_account string.'
return 'billingAccounts/{billing_account}'.format(billing_account=billing_account)
| 5,123,899,605,328,763,000
|
Returns a fully-qualified billing_account string.
|
google/cloud/recommendationengine/v1beta1/recommendationengine-v1beta1-py/google/cloud/recommendationengine_v1beta1/services/user_event_service/client.py
|
common_billing_account_path
|
googleapis/googleapis-gen
|
python
|
@staticmethod
def common_billing_account_path(billing_account: str) -> str:
return 'billingAccounts/{billing_account}'.format(billing_account=billing_account)
|
@staticmethod
def parse_common_billing_account_path(path: str) -> Dict[(str, str)]:
'Parse a billing_account path into its component segments.'
m = re.match('^billingAccounts/(?P<billing_account>.+?)$', path)
return (m.groupdict() if m else {})
| 3,539,036,522,285,068,000
|
Parse a billing_account path into its component segments.
|
google/cloud/recommendationengine/v1beta1/recommendationengine-v1beta1-py/google/cloud/recommendationengine_v1beta1/services/user_event_service/client.py
|
parse_common_billing_account_path
|
googleapis/googleapis-gen
|
python
|
@staticmethod
def parse_common_billing_account_path(path: str) -> Dict[(str, str)]:
m = re.match('^billingAccounts/(?P<billing_account>.+?)$', path)
return (m.groupdict() if m else {})
|
@staticmethod
def common_folder_path(folder: str) -> str:
'Returns a fully-qualified folder string.'
return 'folders/{folder}'.format(folder=folder)
| -6,142,497,583,881,718,000
|
Returns a fully-qualified folder string.
|
google/cloud/recommendationengine/v1beta1/recommendationengine-v1beta1-py/google/cloud/recommendationengine_v1beta1/services/user_event_service/client.py
|
common_folder_path
|
googleapis/googleapis-gen
|
python
|
@staticmethod
def common_folder_path(folder: str) -> str:
return 'folders/{folder}'.format(folder=folder)
|
@staticmethod
def parse_common_folder_path(path: str) -> Dict[(str, str)]:
'Parse a folder path into its component segments.'
m = re.match('^folders/(?P<folder>.+?)$', path)
return (m.groupdict() if m else {})
| 7,731,323,619,502,445,000
|
Parse a folder path into its component segments.
|
google/cloud/recommendationengine/v1beta1/recommendationengine-v1beta1-py/google/cloud/recommendationengine_v1beta1/services/user_event_service/client.py
|
parse_common_folder_path
|
googleapis/googleapis-gen
|
python
|
@staticmethod
def parse_common_folder_path(path: str) -> Dict[(str, str)]:
m = re.match('^folders/(?P<folder>.+?)$', path)
return (m.groupdict() if m else {})
|
@staticmethod
def common_organization_path(organization: str) -> str:
'Returns a fully-qualified organization string.'
return 'organizations/{organization}'.format(organization=organization)
| -1,733,580,681,013,462,000
|
Returns a fully-qualified organization string.
|
google/cloud/recommendationengine/v1beta1/recommendationengine-v1beta1-py/google/cloud/recommendationengine_v1beta1/services/user_event_service/client.py
|
common_organization_path
|
googleapis/googleapis-gen
|
python
|
@staticmethod
def common_organization_path(organization: str) -> str:
return 'organizations/{organization}'.format(organization=organization)
|
@staticmethod
def parse_common_organization_path(path: str) -> Dict[(str, str)]:
'Parse a organization path into its component segments.'
m = re.match('^organizations/(?P<organization>.+?)$', path)
return (m.groupdict() if m else {})
| 6,176,747,584,094,183,000
|
Parse a organization path into its component segments.
|
google/cloud/recommendationengine/v1beta1/recommendationengine-v1beta1-py/google/cloud/recommendationengine_v1beta1/services/user_event_service/client.py
|
parse_common_organization_path
|
googleapis/googleapis-gen
|
python
|
@staticmethod
def parse_common_organization_path(path: str) -> Dict[(str, str)]:
m = re.match('^organizations/(?P<organization>.+?)$', path)
return (m.groupdict() if m else {})
|
@staticmethod
def common_project_path(project: str) -> str:
'Returns a fully-qualified project string.'
return 'projects/{project}'.format(project=project)
| -124,327,816,620,303,040
|
Returns a fully-qualified project string.
|
google/cloud/recommendationengine/v1beta1/recommendationengine-v1beta1-py/google/cloud/recommendationengine_v1beta1/services/user_event_service/client.py
|
common_project_path
|
googleapis/googleapis-gen
|
python
|
@staticmethod
def common_project_path(project: str) -> str:
return 'projects/{project}'.format(project=project)
|
@staticmethod
def parse_common_project_path(path: str) -> Dict[(str, str)]:
'Parse a project path into its component segments.'
m = re.match('^projects/(?P<project>.+?)$', path)
return (m.groupdict() if m else {})
| -6,609,324,249,468,844,000
|
Parse a project path into its component segments.
|
google/cloud/recommendationengine/v1beta1/recommendationengine-v1beta1-py/google/cloud/recommendationengine_v1beta1/services/user_event_service/client.py
|
parse_common_project_path
|
googleapis/googleapis-gen
|
python
|
@staticmethod
def parse_common_project_path(path: str) -> Dict[(str, str)]:
m = re.match('^projects/(?P<project>.+?)$', path)
return (m.groupdict() if m else {})
|
@staticmethod
def common_location_path(project: str, location: str) -> str:
'Returns a fully-qualified location string.'
return 'projects/{project}/locations/{location}'.format(project=project, location=location)
| 8,215,176,652,370,049,000
|
Returns a fully-qualified location string.
|
google/cloud/recommendationengine/v1beta1/recommendationengine-v1beta1-py/google/cloud/recommendationengine_v1beta1/services/user_event_service/client.py
|
common_location_path
|
googleapis/googleapis-gen
|
python
|
@staticmethod
def common_location_path(project: str, location: str) -> str:
return 'projects/{project}/locations/{location}'.format(project=project, location=location)
|
@staticmethod
def parse_common_location_path(path: str) -> Dict[(str, str)]:
'Parse a location path into its component segments.'
m = re.match('^projects/(?P<project>.+?)/locations/(?P<location>.+?)$', path)
return (m.groupdict() if m else {})
| 1,703,235,435,027,079,400
|
Parse a location path into its component segments.
|
google/cloud/recommendationengine/v1beta1/recommendationengine-v1beta1-py/google/cloud/recommendationengine_v1beta1/services/user_event_service/client.py
|
parse_common_location_path
|
googleapis/googleapis-gen
|
python
|
@staticmethod
def parse_common_location_path(path: str) -> Dict[(str, str)]:
m = re.match('^projects/(?P<project>.+?)/locations/(?P<location>.+?)$', path)
return (m.groupdict() if m else {})
|
def __init__(self, *, credentials: Optional[ga_credentials.Credentials]=None, transport: Union[(str, UserEventServiceTransport, None)]=None, client_options: Optional[client_options_lib.ClientOptions]=None, client_info: gapic_v1.client_info.ClientInfo=DEFAULT_CLIENT_INFO) -> None:
    """Instantiate the user event service client.

    Args:
        credentials: Authorization credentials to attach to requests. When
            omitted, credentials are resolved from the environment.
        transport: The transport to use — a name, a transport instance, or
            None to let the client choose automatically.
        client_options: Custom client options. ``api_endpoint`` overrides
            the default endpoint. The GOOGLE_API_USE_MTLS_ENDPOINT
            environment variable ("always"/"never"/"auto", default "auto")
            selects the endpoint, and GOOGLE_API_USE_CLIENT_CERTIFICATE
            ("true"/"false") controls client-certificate use for mutual
            TLS. Ignored for credentials/endpoint when a transport
            *instance* is supplied.
        client_info: Client info used to send a user-agent string with API
            requests; defaults are used when None.

    Raises:
        google.auth.exceptions.MutualTLSChannelError: If mutual TLS
            transport creation failed for any reason.
    """
    # Normalize client_options into a ClientOptions instance.
    if isinstance(client_options, dict):
        client_options = client_options_lib.from_dict(client_options)
    if client_options is None:
        client_options = client_options_lib.ClientOptions()
    # Decide whether a client certificate should be used for mutual TLS.
    use_client_cert = bool(util.strtobool(os.getenv('GOOGLE_API_USE_CLIENT_CERTIFICATE', 'false')))
    client_cert_source_func = None
    is_mtls = False
    if use_client_cert:
        if client_options.client_cert_source:
            is_mtls = True
            client_cert_source_func = client_options.client_cert_source
        else:
            is_mtls = mtls.has_default_client_cert_source()
            client_cert_source_func = mtls.default_client_cert_source() if is_mtls else None
    # Resolve the API endpoint: an explicit override wins, otherwise the
    # GOOGLE_API_USE_MTLS_ENDPOINT environment variable decides.
    if client_options.api_endpoint is not None:
        api_endpoint = client_options.api_endpoint
    else:
        use_mtls_env = os.getenv('GOOGLE_API_USE_MTLS_ENDPOINT', 'auto')
        if use_mtls_env == 'never':
            api_endpoint = self.DEFAULT_ENDPOINT
        elif use_mtls_env == 'always':
            api_endpoint = self.DEFAULT_MTLS_ENDPOINT
        elif use_mtls_env == 'auto':
            api_endpoint = self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT
        else:
            raise MutualTLSChannelError('Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always')
    if isinstance(transport, UserEventServiceTransport):
        # A ready-made transport carries its own credentials and scopes;
        # conflicting client-level settings are rejected.
        if credentials or client_options.credentials_file:
            raise ValueError('When providing a transport instance, provide its credentials directly.')
        if client_options.scopes:
            raise ValueError('When providing a transport instance, provide its scopes directly.')
        self._transport = transport
    else:
        Transport = type(self).get_transport_class(transport)
        self._transport = Transport(credentials=credentials, credentials_file=client_options.credentials_file, host=api_endpoint, scopes=client_options.scopes, client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, always_use_jwt_access=True)
| -9,065,012,600,399,228,000
|
Instantiates the user event service client.
Args:
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
transport (Union[str, UserEventServiceTransport]): The
transport to use. If set to None, a transport is chosen
automatically.
client_options (google.api_core.client_options.ClientOptions): Custom options for the
client. It won't take effect if a ``transport`` instance is provided.
(1) The ``api_endpoint`` property can be used to override the
default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
environment variable can also be used to override the endpoint:
"always" (always use the default mTLS endpoint), "never" (always
use the default regular endpoint) and "auto" (auto switch to the
default mTLS endpoint if client certificate is present, this is
the default value). However, the ``api_endpoint`` property takes
precedence if provided.
(2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
is "true", then the ``client_cert_source`` property can be used
to provide client certificate for mutual TLS transport. If
not provided, the default SSL client certificate will be used if
present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
set, no client certificate will be used.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
Raises:
google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
creation failed for any reason.
|
google/cloud/recommendationengine/v1beta1/recommendationengine-v1beta1-py/google/cloud/recommendationengine_v1beta1/services/user_event_service/client.py
|
__init__
|
googleapis/googleapis-gen
|
python
|
def __init__(self, *, credentials: Optional[ga_credentials.Credentials]=None, transport: Union[(str, UserEventServiceTransport, None)]=None, client_options: Optional[client_options_lib.ClientOptions]=None, client_info: gapic_v1.client_info.ClientInfo=DEFAULT_CLIENT_INFO) -> None:
'Instantiates the user event service client.\n\n Args:\n credentials (Optional[google.auth.credentials.Credentials]): The\n authorization credentials to attach to requests. These\n credentials identify the application to the service; if none\n are specified, the client will attempt to ascertain the\n credentials from the environment.\n transport (Union[str, UserEventServiceTransport]): The\n transport to use. If set to None, a transport is chosen\n automatically.\n client_options (google.api_core.client_options.ClientOptions): Custom options for the\n client. It won\'t take effect if a ``transport`` instance is provided.\n (1) The ``api_endpoint`` property can be used to override the\n default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT\n environment variable can also be used to override the endpoint:\n "always" (always use the default mTLS endpoint), "never" (always\n use the default regular endpoint) and "auto" (auto switch to the\n default mTLS endpoint if client certificate is present, this is\n the default value). However, the ``api_endpoint`` property takes\n precedence if provided.\n (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable\n is "true", then the ``client_cert_source`` property can be used\n to provide client certificate for mutual TLS transport. If\n not provided, the default SSL client certificate will be used if\n present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not\n set, no client certificate will be used.\n client_info (google.api_core.gapic_v1.client_info.ClientInfo):\n The client info used to send a user-agent string along with\n API requests. If ``None``, then default info will be used.\n Generally, you only need to set this if you\'re developing\n your own client library.\n\n Raises:\n google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport\n creation failed for any reason.\n '
if isinstance(client_options, dict):
client_options = client_options_lib.from_dict(client_options)
if (client_options is None):
client_options = client_options_lib.ClientOptions()
use_client_cert = bool(util.strtobool(os.getenv('GOOGLE_API_USE_CLIENT_CERTIFICATE', 'false')))
client_cert_source_func = None
is_mtls = False
if use_client_cert:
if client_options.client_cert_source:
is_mtls = True
client_cert_source_func = client_options.client_cert_source
else:
is_mtls = mtls.has_default_client_cert_source()
if is_mtls:
client_cert_source_func = mtls.default_client_cert_source()
else:
client_cert_source_func = None
if (client_options.api_endpoint is not None):
api_endpoint = client_options.api_endpoint
else:
use_mtls_env = os.getenv('GOOGLE_API_USE_MTLS_ENDPOINT', 'auto')
if (use_mtls_env == 'never'):
api_endpoint = self.DEFAULT_ENDPOINT
elif (use_mtls_env == 'always'):
api_endpoint = self.DEFAULT_MTLS_ENDPOINT
elif (use_mtls_env == 'auto'):
if is_mtls:
api_endpoint = self.DEFAULT_MTLS_ENDPOINT
else:
api_endpoint = self.DEFAULT_ENDPOINT
else:
raise MutualTLSChannelError('Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always')
if isinstance(transport, UserEventServiceTransport):
if (credentials or client_options.credentials_file):
raise ValueError('When providing a transport instance, provide its credentials directly.')
if client_options.scopes:
raise ValueError('When providing a transport instance, provide its scopes directly.')
self._transport = transport
else:
Transport = type(self).get_transport_class(transport)
self._transport = Transport(credentials=credentials, credentials_file=client_options.credentials_file, host=api_endpoint, scopes=client_options.scopes, client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, always_use_jwt_access=True)
|
def write_user_event(self, request: Union[(user_event_service.WriteUserEventRequest, dict)]=None, *, parent: str=None, user_event: gcr_user_event.UserEvent=None, retry: retries.Retry=gapic_v1.method.DEFAULT, timeout: float=None, metadata: Sequence[Tuple[(str, str)]]=()) -> gcr_user_event.UserEvent:
    """Write a single user event.

    Args:
        request: The ``WriteUserEventRequest`` message (or equivalent
            dict). Mutually exclusive with the flattened fields below.
        parent: Required parent eventStore resource name, such as
            ``projects/1234/locations/global/catalogs/default_catalog/eventStores/default_event_store``.
        user_event: Required. The user event to write.
        retry: Designation of which errors, if any, should be retried.
        timeout: The timeout for this request.
        metadata: Strings sent along with the request as metadata.

    Returns:
        google.cloud.recommendationengine_v1beta1.types.UserEvent: The
        written user event.

    Raises:
        ValueError: If ``request`` is combined with a flattened field.
    """
    # Reject mixing the request object with flattened fields (truthiness
    # check mirrors any([parent, user_event])).
    if (request is not None) and (parent or user_event):
        raise ValueError('If the `request` argument is set, then none of the individual field arguments should be set.')
    # Coerce dict/None into a proper request message.
    if not isinstance(request, user_event_service.WriteUserEventRequest):
        request = user_event_service.WriteUserEventRequest(request)
    if parent is not None:
        request.parent = parent
    if user_event is not None:
        request.user_event = user_event
    # Attach routing metadata for the parent resource and invoke the
    # wrapped RPC.
    wrapped_rpc = self._transport._wrapped_methods[self._transport.write_user_event]
    routing = gapic_v1.routing_header.to_grpc_metadata((('parent', request.parent),))
    metadata = tuple(metadata) + (routing,)
    return wrapped_rpc(request, retry=retry, timeout=timeout, metadata=metadata)
| 5,534,637,056,578,274,000
|
Writes a single user event.
Args:
request (Union[google.cloud.recommendationengine_v1beta1.types.WriteUserEventRequest, dict]):
The request object. Request message for WriteUserEvent
method.
parent (str):
Required. The parent eventStore resource name, such as
``projects/1234/locations/global/catalogs/default_catalog/eventStores/default_event_store``.
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
user_event (google.cloud.recommendationengine_v1beta1.types.UserEvent):
Required. User event to write.
This corresponds to the ``user_event`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.recommendationengine_v1beta1.types.UserEvent:
UserEvent captures all metadata
information recommendation engine needs
to know about how end users interact
with customers' website.
|
google/cloud/recommendationengine/v1beta1/recommendationengine-v1beta1-py/google/cloud/recommendationengine_v1beta1/services/user_event_service/client.py
|
write_user_event
|
googleapis/googleapis-gen
|
python
|
def write_user_event(self, request: Union[(user_event_service.WriteUserEventRequest, dict)]=None, *, parent: str=None, user_event: gcr_user_event.UserEvent=None, retry: retries.Retry=gapic_v1.method.DEFAULT, timeout: float=None, metadata: Sequence[Tuple[(str, str)]]=()) -> gcr_user_event.UserEvent:
    """Writes a single user event.

    Args:
        request: The full request object (``WriteUserEventRequest`` or dict);
            mutually exclusive with the flattened fields below.
        parent: Required. The parent eventStore resource name, such as
            ``projects/1234/locations/global/catalogs/default_catalog/eventStores/default_event_store``.
        user_event: Required. User event to write.
        retry: Designation of what errors, if any, should be retried.
        timeout: The timeout for this request.
        metadata: Strings which should be sent along with the request as
            metadata.

    Returns:
        google.cloud.recommendationengine_v1beta1.types.UserEvent: The
        written user event.

    Raises:
        ValueError: If ``request`` is provided together with any flattened
            field argument.
    """
    # ``request`` and the flattened fields are mutually exclusive.
    # NOTE: truthiness (not ``is not None``) is checked here, matching
    # the generated-client convention.
    flattened = (parent, user_event)
    if (request is not None) and any(flattened):
        raise ValueError('If the `request` argument is set, then none of the individual field arguments should be set.')
    # Coerce dicts (or None) into the proto request type.
    if not isinstance(request, user_event_service.WriteUserEventRequest):
        request = user_event_service.WriteUserEventRequest(request)
    # Copy any explicitly-provided flattened fields onto the request.
    for attr, value in (('parent', parent), ('user_event', user_event)):
        if value is not None:
            setattr(request, attr, value)
    rpc = self._transport._wrapped_methods[self._transport.write_user_event]
    # Attach the routing header so the request reaches the right backend.
    routing = gapic_v1.routing_header.to_grpc_metadata((('parent', request.parent),))
    metadata = tuple(metadata) + (routing,)
    return rpc(request, retry=retry, timeout=timeout, metadata=metadata)
|
def collect_user_event(self, request: Union[(user_event_service.CollectUserEventRequest, dict)]=None, *, parent: str=None, user_event: str=None, uri: str=None, ets: int=None, retry: retries.Retry=gapic_v1.method.DEFAULT, timeout: float=None, metadata: Sequence[Tuple[(str, str)]]=()) -> httpbody_pb2.HttpBody:
    """Writes a single user event from the browser.

    This uses a GET request due to the browser restriction on POST-ing
    to a 3rd-party domain. It is used only by the Recommendations AI
    JavaScript pixel; users should not call this method directly.

    Args:
        request: The full request object (``CollectUserEventRequest`` or
            dict); mutually exclusive with the flattened fields below.
        parent: Required. The parent eventStore name, such as
            ``projects/1234/locations/global/catalogs/default_catalog/eventStores/default_event_store``.
        user_event: Required. URL-encoded UserEvent proto.
        uri: Optional. The URL including cgi-parameters but excluding the
            hash fragment, truncated to 1.5K bytes to stay under 2K.
        ets: Optional. The event timestamp in milliseconds; prevents
            browser caching of otherwise identical GET requests.
        retry: Designation of what errors, if any, should be retried.
        timeout: The timeout for this request.
        metadata: Strings which should be sent along with the request as
            metadata.

    Returns:
        google.api.httpbody_pb2.HttpBody: An arbitrary HTTP body, used
        for payload formats that cannot be represented as JSON.

    Raises:
        ValueError: If ``request`` is provided together with any flattened
            field argument.
    """
    # ``request`` and the flattened fields are mutually exclusive
    # (truthiness check, per the generated-client convention).
    flattened = (parent, user_event, uri, ets)
    if (request is not None) and any(flattened):
        raise ValueError('If the `request` argument is set, then none of the individual field arguments should be set.')
    # Coerce dicts (or None) into the proto request type.
    if not isinstance(request, user_event_service.CollectUserEventRequest):
        request = user_event_service.CollectUserEventRequest(request)
    # Copy any explicitly-provided flattened fields onto the request.
    for attr, value in (('parent', parent), ('user_event', user_event), ('uri', uri), ('ets', ets)):
        if value is not None:
            setattr(request, attr, value)
    rpc = self._transport._wrapped_methods[self._transport.collect_user_event]
    # Attach the routing header so the request reaches the right backend.
    routing = gapic_v1.routing_header.to_grpc_metadata((('parent', request.parent),))
    metadata = tuple(metadata) + (routing,)
    return rpc(request, retry=retry, timeout=timeout, metadata=metadata)
| 2,787,104,905,625,894,400
|
Writes a single user event from the browser. This
uses a GET request due to browser restriction of
POST-ing to a 3rd party domain.
This method is used only by the Recommendations AI
JavaScript pixel. Users should not call this method
directly.
Args:
request (Union[google.cloud.recommendationengine_v1beta1.types.CollectUserEventRequest, dict]):
The request object. Request message for CollectUserEvent
method.
parent (str):
Required. The parent eventStore name, such as
``projects/1234/locations/global/catalogs/default_catalog/eventStores/default_event_store``.
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
user_event (str):
Required. URL encoded UserEvent
proto.
This corresponds to the ``user_event`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
uri (str):
Optional. The url including cgi-
parameters but excluding the hash
fragment. The URL must be truncated to
1.5K bytes to conservatively be under
the 2K bytes. This is often more useful
than the referer url, because many
browsers only send the domain for 3rd
party requests.
This corresponds to the ``uri`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
ets (int):
Optional. The event timestamp in
milliseconds. This prevents browser
caching of otherwise identical get
requests. The name is abbreviated to
reduce the payload bytes.
This corresponds to the ``ets`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api.httpbody_pb2.HttpBody:
Message that represents an arbitrary HTTP body. It should only be used for
payload formats that can't be represented as JSON,
such as raw binary or an HTML page.
This message can be used both in streaming and
non-streaming API methods in the request as well as
the response.
It can be used as a top-level request field, which is
convenient if one wants to extract parameters from
either the URL or HTTP template into the request
fields and also want access to the raw HTTP body.
Example:
message GetResourceRequest {
// A unique request id. string request_id = 1;
// The raw HTTP body is bound to this field.
google.api.HttpBody http_body = 2;
}
service ResourceService {
rpc GetResource(GetResourceRequest)
returns (google.api.HttpBody);
rpc UpdateResource(google.api.HttpBody)
returns (google.protobuf.Empty);
}
Example with streaming methods:
service CaldavService {
rpc GetCalendar(stream google.api.HttpBody)
returns (stream google.api.HttpBody);
rpc UpdateCalendar(stream google.api.HttpBody)
returns (stream google.api.HttpBody);
}
Use of this type only changes how the request and
response bodies are handled, all other features will
continue to work unchanged.
|
google/cloud/recommendationengine/v1beta1/recommendationengine-v1beta1-py/google/cloud/recommendationengine_v1beta1/services/user_event_service/client.py
|
collect_user_event
|
googleapis/googleapis-gen
|
python
|
def collect_user_event(self, request: Union[(user_event_service.CollectUserEventRequest, dict)]=None, *, parent: str=None, user_event: str=None, uri: str=None, ets: int=None, retry: retries.Retry=gapic_v1.method.DEFAULT, timeout: float=None, metadata: Sequence[Tuple[(str, str)]]=()) -> httpbody_pb2.HttpBody:
    """Writes a single user event from the browser.

    This uses a GET request due to the browser restriction on POST-ing
    to a 3rd-party domain. It is used only by the Recommendations AI
    JavaScript pixel; users should not call this method directly.

    Args:
        request: The full request object (``CollectUserEventRequest`` or
            dict); mutually exclusive with the flattened fields below.
        parent: Required. The parent eventStore name, such as
            ``projects/1234/locations/global/catalogs/default_catalog/eventStores/default_event_store``.
        user_event: Required. URL-encoded UserEvent proto.
        uri: Optional. The URL including cgi-parameters but excluding the
            hash fragment, truncated to 1.5K bytes to stay under 2K.
        ets: Optional. The event timestamp in milliseconds; prevents
            browser caching of otherwise identical GET requests.
        retry: Designation of what errors, if any, should be retried.
        timeout: The timeout for this request.
        metadata: Strings which should be sent along with the request as
            metadata.

    Returns:
        google.api.httpbody_pb2.HttpBody: An arbitrary HTTP body, used
        for payload formats that cannot be represented as JSON.

    Raises:
        ValueError: If ``request`` is provided together with any flattened
            field argument.
    """
    # ``request`` and the flattened fields are mutually exclusive
    # (truthiness check, per the generated-client convention).
    flattened = (parent, user_event, uri, ets)
    if (request is not None) and any(flattened):
        raise ValueError('If the `request` argument is set, then none of the individual field arguments should be set.')
    # Coerce dicts (or None) into the proto request type.
    if not isinstance(request, user_event_service.CollectUserEventRequest):
        request = user_event_service.CollectUserEventRequest(request)
    # Copy any explicitly-provided flattened fields onto the request.
    for attr, value in (('parent', parent), ('user_event', user_event), ('uri', uri), ('ets', ets)):
        if value is not None:
            setattr(request, attr, value)
    rpc = self._transport._wrapped_methods[self._transport.collect_user_event]
    # Attach the routing header so the request reaches the right backend.
    routing = gapic_v1.routing_header.to_grpc_metadata((('parent', request.parent),))
    metadata = tuple(metadata) + (routing,)
    return rpc(request, retry=retry, timeout=timeout, metadata=metadata)
|
def list_user_events(self, request: Union[(user_event_service.ListUserEventsRequest, dict)]=None, *, parent: str=None, filter: str=None, retry: retries.Retry=gapic_v1.method.DEFAULT, timeout: float=None, metadata: Sequence[Tuple[(str, str)]]=()) -> pagers.ListUserEventsPager:
    """Gets a list of user events within a time range, with potential
    filtering.

    Args:
        request: The full request object (``ListUserEventsRequest`` or
            dict); mutually exclusive with the flattened fields below.
        parent: Required. The parent eventStore resource name, such as
            ``projects/*/locations/*/catalogs/default_catalog/eventStores/default_event_store``.
        filter: Optional. Filtering expression restricting the returned
            events (``eventTime`` range, one ``eventType``, and/or
            ``eventsMissingCatalogItems``); e.g.
            ``eventTime > "2012-04-23T18:25:43.511Z" eventType = search``.
        retry: Designation of what errors, if any, should be retried.
        timeout: The timeout for this request.
        metadata: Strings which should be sent along with the request as
            metadata.

    Returns:
        pagers.ListUserEventsPager: Iterating over this object yields
        results and resolves additional pages automatically.

    Raises:
        ValueError: If ``request`` is provided together with any flattened
            field argument.
    """
    # ``request`` and the flattened fields are mutually exclusive
    # (truthiness check, per the generated-client convention).
    flattened = (parent, filter)
    if (request is not None) and any(flattened):
        raise ValueError('If the `request` argument is set, then none of the individual field arguments should be set.')
    # Coerce dicts (or None) into the proto request type.
    if not isinstance(request, user_event_service.ListUserEventsRequest):
        request = user_event_service.ListUserEventsRequest(request)
    # Copy any explicitly-provided flattened fields onto the request.
    for attr, value in (('parent', parent), ('filter', filter)):
        if value is not None:
            setattr(request, attr, value)
    rpc = self._transport._wrapped_methods[self._transport.list_user_events]
    # Attach the routing header so the request reaches the right backend.
    routing = gapic_v1.routing_header.to_grpc_metadata((('parent', request.parent),))
    metadata = tuple(metadata) + (routing,)
    response = rpc(request, retry=retry, timeout=timeout, metadata=metadata)
    # Wrap the raw response in a pager that fetches subsequent pages lazily.
    return pagers.ListUserEventsPager(method=rpc, request=request, response=response, metadata=metadata)
| 373,812,839,536,061,600
|
Gets a list of user events within a time range, with
potential filtering.
Args:
request (Union[google.cloud.recommendationengine_v1beta1.types.ListUserEventsRequest, dict]):
The request object. Request message for ListUserEvents
method.
parent (str):
Required. The parent eventStore resource name, such as
``projects/*/locations/*/catalogs/default_catalog/eventStores/default_event_store``.
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
filter (str):
Optional. Filtering expression to specify restrictions
over returned events. This is a sequence of terms, where
each term applies some kind of a restriction to the
returned user events. Use this expression to restrict
results to a specific time range, or filter events by
eventType. eg: eventTime > "2012-04-23T18:25:43.511Z"
eventsMissingCatalogItems
eventTime<"2012-04-23T18:25:43.511Z" eventType=search
We expect only 3 types of fields:
::
* eventTime: this can be specified a maximum of 2 times, once with a
less than operator and once with a greater than operator. The
eventTime restrict should result in one contiguous valid eventTime
range.
* eventType: only 1 eventType restriction can be specified.
* eventsMissingCatalogItems: specifying this will restrict results
to events for which catalog items were not found in the catalog. The
default behavior is to return only those events for which catalog
items were found.
Some examples of valid filters expressions:
- Example 1: eventTime > "2012-04-23T18:25:43.511Z"
eventTime < "2012-04-23T18:30:43.511Z"
- Example 2: eventTime > "2012-04-23T18:25:43.511Z"
eventType = detail-page-view
- Example 3: eventsMissingCatalogItems eventType =
search eventTime < "2018-04-23T18:30:43.511Z"
- Example 4: eventTime > "2012-04-23T18:25:43.511Z"
- Example 5: eventType = search
- Example 6: eventsMissingCatalogItems
This corresponds to the ``filter`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.recommendationengine_v1beta1.services.user_event_service.pagers.ListUserEventsPager:
Response message for ListUserEvents
method.
Iterating over this object will yield
results and resolve additional pages
automatically.
|
google/cloud/recommendationengine/v1beta1/recommendationengine-v1beta1-py/google/cloud/recommendationengine_v1beta1/services/user_event_service/client.py
|
list_user_events
|
googleapis/googleapis-gen
|
python
|
def list_user_events(self, request: Union[(user_event_service.ListUserEventsRequest, dict)]=None, *, parent: str=None, filter: str=None, retry: retries.Retry=gapic_v1.method.DEFAULT, timeout: float=None, metadata: Sequence[Tuple[(str, str)]]=()) -> pagers.ListUserEventsPager:
    """Gets a list of user events within a time range, with potential
    filtering.

    Args:
        request: The full request object (``ListUserEventsRequest`` or
            dict); mutually exclusive with the flattened fields below.
        parent: Required. The parent eventStore resource name, such as
            ``projects/*/locations/*/catalogs/default_catalog/eventStores/default_event_store``.
        filter: Optional. Filtering expression restricting the returned
            events (``eventTime`` range, one ``eventType``, and/or
            ``eventsMissingCatalogItems``); e.g.
            ``eventTime > "2012-04-23T18:25:43.511Z" eventType = search``.
        retry: Designation of what errors, if any, should be retried.
        timeout: The timeout for this request.
        metadata: Strings which should be sent along with the request as
            metadata.

    Returns:
        pagers.ListUserEventsPager: Iterating over this object yields
        results and resolves additional pages automatically.

    Raises:
        ValueError: If ``request`` is provided together with any flattened
            field argument.
    """
    # ``request`` and the flattened fields are mutually exclusive
    # (truthiness check, per the generated-client convention).
    flattened = (parent, filter)
    if (request is not None) and any(flattened):
        raise ValueError('If the `request` argument is set, then none of the individual field arguments should be set.')
    # Coerce dicts (or None) into the proto request type.
    if not isinstance(request, user_event_service.ListUserEventsRequest):
        request = user_event_service.ListUserEventsRequest(request)
    # Copy any explicitly-provided flattened fields onto the request.
    for attr, value in (('parent', parent), ('filter', filter)):
        if value is not None:
            setattr(request, attr, value)
    rpc = self._transport._wrapped_methods[self._transport.list_user_events]
    # Attach the routing header so the request reaches the right backend.
    routing = gapic_v1.routing_header.to_grpc_metadata((('parent', request.parent),))
    metadata = tuple(metadata) + (routing,)
    response = rpc(request, retry=retry, timeout=timeout, metadata=metadata)
    # Wrap the raw response in a pager that fetches subsequent pages lazily.
    return pagers.ListUserEventsPager(method=rpc, request=request, response=response, metadata=metadata)
|
def purge_user_events(self, request: Union[(user_event_service.PurgeUserEventsRequest, dict)]=None, *, parent: str=None, filter: str=None, force: bool=None, retry: retries.Retry=gapic_v1.method.DEFAULT, timeout: float=None, metadata: Sequence[Tuple[(str, str)]]=()) -> operation.Operation:
    """Deletes permanently all user events specified by the filter
    provided.

    Depending on the number of events specified by the filter, this
    operation could take hours or days to complete. To test a filter,
    use the list command first.

    Args:
        request: The full request object (``PurgeUserEventsRequest`` or
            dict); mutually exclusive with the flattened fields below.
        parent: Required. The resource name of the event_store under
            which the events are created, in the format
            ``projects/${projectId}/locations/global/catalogs/${catalogId}/eventStores/${eventStoreId}``.
        filter: Required. Non-empty filter string selecting the events
            to delete (eligible fields: ``eventType``, ``eventTime``,
            ``visitorId``, ``userId``; terms are implicitly ANDed).
        force: Optional, default false. Set true to actually perform the
            purge; otherwise only a sampling of events to be deleted is
            returned.
        retry: Designation of what errors, if any, should be retried.
        timeout: The timeout for this request.
        metadata: Strings which should be sent along with the request as
            metadata.

    Returns:
        google.api_core.operation.Operation: A long-running operation
        whose result type is ``PurgeUserEventsResponse``.

    Raises:
        ValueError: If ``request`` is provided together with any flattened
            field argument.
    """
    # ``request`` and the flattened fields are mutually exclusive
    # (truthiness check, per the generated-client convention).
    flattened = (parent, filter, force)
    if (request is not None) and any(flattened):
        raise ValueError('If the `request` argument is set, then none of the individual field arguments should be set.')
    # Coerce dicts (or None) into the proto request type.
    if not isinstance(request, user_event_service.PurgeUserEventsRequest):
        request = user_event_service.PurgeUserEventsRequest(request)
    # Copy any explicitly-provided flattened fields onto the request.
    for attr, value in (('parent', parent), ('filter', filter), ('force', force)):
        if value is not None:
            setattr(request, attr, value)
    rpc = self._transport._wrapped_methods[self._transport.purge_user_events]
    # Attach the routing header so the request reaches the right backend.
    routing = gapic_v1.routing_header.to_grpc_metadata((('parent', request.parent),))
    metadata = tuple(metadata) + (routing,)
    response = rpc(request, retry=retry, timeout=timeout, metadata=metadata)
    # Wrap the raw operation so callers can await the typed LRO result.
    return operation.from_gapic(response, self._transport.operations_client, user_event_service.PurgeUserEventsResponse, metadata_type=user_event_service.PurgeUserEventsMetadata)
| 8,026,864,222,891,210,000
|
Deletes permanently all user events specified by the
filter provided. Depending on the number of events
specified by the filter, this operation could take hours
or days to complete. To test a filter, use the list
command first.
Args:
request (Union[google.cloud.recommendationengine_v1beta1.types.PurgeUserEventsRequest, dict]):
The request object. Request message for PurgeUserEvents
method.
parent (str):
Required. The resource name of the event_store under
which the events are created. The format is
``projects/${projectId}/locations/global/catalogs/${catalogId}/eventStores/${eventStoreId}``
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
filter (str):
Required. The filter string to specify the events to be
deleted. Empty string filter is not allowed. This filter
can also be used with ListUserEvents API to list events
that will be deleted. The eligible fields for filtering
are:
- eventType - UserEvent.eventType field of type string.
- eventTime - in ISO 8601 "zulu" format.
- visitorId - field of type string. Specifying this
will delete all events associated with a visitor.
- userId - field of type string. Specifying this will
delete all events associated with a user. Example 1:
Deleting all events in a time range.
``eventTime > "2012-04-23T18:25:43.511Z" eventTime < "2012-04-23T18:30:43.511Z"``
Example 2: Deleting specific eventType in time range.
``eventTime > "2012-04-23T18:25:43.511Z" eventType = "detail-page-view"``
Example 3: Deleting all events for a specific visitor
``visitorId = visitor1024`` The filtering fields are
assumed to have an implicit AND.
This corresponds to the ``filter`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
force (bool):
Optional. The default value is false.
Override this flag to true to actually
perform the purge. If the field is not
set to true, a sampling of events to be
deleted will be returned.
This corresponds to the ``force`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation.Operation:
An object representing a long-running operation.
The result type for the operation will be :class:`google.cloud.recommendationengine_v1beta1.types.PurgeUserEventsResponse` Response of the PurgeUserEventsRequest. If the long running operation is
successfully done, then this message is returned by
the google.longrunning.Operations.response field.
|
google/cloud/recommendationengine/v1beta1/recommendationengine-v1beta1-py/google/cloud/recommendationengine_v1beta1/services/user_event_service/client.py
|
purge_user_events
|
googleapis/googleapis-gen
|
python
|
def purge_user_events(self, request: Union[(user_event_service.PurgeUserEventsRequest, dict)]=None, *, parent: str=None, filter: str=None, force: bool=None, retry: retries.Retry=gapic_v1.method.DEFAULT, timeout: float=None, metadata: Sequence[Tuple[(str, str)]]=()) -> operation.Operation:
    """Permanently delete all user events matching ``filter``.

    Depending on how many events the filter matches, the purge may take
    hours or days; the call therefore returns a long-running operation.
    To preview which events would be deleted, run the operation with
    ``force`` left unset (a sampling is returned instead of deleting).

    Args:
        request (Union[google.cloud.recommendationengine_v1beta1.types.PurgeUserEventsRequest, dict]):
            The full request object. Mutually exclusive with the
            flattened ``parent``/``filter``/``force`` arguments.
        parent (str):
            Required. Resource name of the event_store the events live
            under, e.g.
            ``projects/${projectId}/locations/global/catalogs/${catalogId}/eventStores/${eventStoreId}``.
        filter (str):
            Required. Non-empty filter string selecting the events to
            delete. Eligible fields: ``eventType``, ``eventTime`` (ISO
            8601 "zulu"), ``visitorId``, ``userId``; conditions are
            combined with an implicit AND. The same filter works with
            the ListUserEvents API for previewing.
        force (bool):
            Optional; defaults to false. Set true to actually perform
            the purge; otherwise a sampling of the events that would be
            deleted is returned.
        retry (google.api_core.retry.Retry): Which errors, if any, to retry.
        timeout (float): Timeout for this request.
        metadata (Sequence[Tuple[str, str]]): Extra strings sent with the
            request as metadata.

    Returns:
        google.api_core.operation.Operation:
            A long-running operation whose result is
            :class:`google.cloud.recommendationengine_v1beta1.types.PurgeUserEventsResponse`.

    Raises:
        ValueError: If ``request`` is given together with any flattened
            field argument.
    """
    # A caller must pick one style: the full request object, or the
    # flattened convenience arguments — never both.
    flattened_given = any([parent, filter, force])
    if (request is not None) and flattened_given:
        raise ValueError('If the `request` argument is set, then none of the individual field arguments should be set.')
    # Coerce a dict (or None) into the proto-plus request type.
    if not isinstance(request, user_event_service.PurgeUserEventsRequest):
        request = user_event_service.PurgeUserEventsRequest(request)
    # Copy any flattened arguments onto the request.
    for field_name, field_value in (('parent', parent), ('filter', filter), ('force', force)):
        if field_value is not None:
            setattr(request, field_name, field_value)
    # Look up the pre-wrapped RPC (carries default retry/timeout config).
    rpc = self._transport._wrapped_methods[self._transport.purge_user_events]
    # Attach the routing header so the backend can route by parent.
    metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((('parent', request.parent),)),)
    raw_operation = rpc(request, retry=retry, timeout=timeout, metadata=metadata)
    # Wrap the raw LRO so callers can poll/await the typed response.
    return operation.from_gapic(raw_operation, self._transport.operations_client, user_event_service.PurgeUserEventsResponse, metadata_type=user_event_service.PurgeUserEventsMetadata)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.