function stringlengths 11 56k | repo_name stringlengths 5 60 | features list |
|---|---|---|
def test_sum(self):
self.assertEqualOnAllArrayTypes(sum_values, [1, 2, 3, 4.1], 10.1)
self.assertEqualOnAllArrayTypes(sum_values, [-1.2, -2, -3, -4], -10.2)
self.assertEqualOnAllArrayTypes(sum_values, [], 0) | blue-yonder/tsfresh | [
7135,
1120,
7135,
61,
1477481357
] |
def test_agg_autocorrelation_returns_max_lag_does_not_affect_other_results(self):
param = [{"f_agg": "mean", "maxlag": 1}, {"f_agg": "mean", "maxlag": 10}]
x = range(10)
res1 = dict(agg_autocorrelation(x, param=param))['f_agg_"mean"__maxlag_1']
res10 = dict(agg_autocorrelation(x, param=param))['f_agg_"mean"__maxlag_10']
self.assertAlmostEqual(res1, 0.77777777, places=4)
self.assertAlmostEqual(res10, -0.64983164983165, places=4)
param = [{"f_agg": "mean", "maxlag": 1}]
x = range(10)
res1 = dict(agg_autocorrelation(x, param=param))['f_agg_"mean"__maxlag_1']
self.assertAlmostEqual(res1, 0.77777777, places=4) | blue-yonder/tsfresh | [
7135,
1120,
7135,
61,
1477481357
] |
def test_augmented_dickey_fuller(self):
# todo: add unit test for the values of the test statistic
# the adf hypothesis test checks for unit roots,
# so H_0 = {random drift} vs H_1 = {AR(1) model}
# H0 is true
np.random.seed(seed=42)
x = np.cumsum(np.random.uniform(size=100))
param = [
{"autolag": "BIC", "attr": "teststat"},
{"autolag": "BIC", "attr": "pvalue"},
{"autolag": "BIC", "attr": "usedlag"},
]
expected_index = [
'attr_"teststat"__autolag_"BIC"',
'attr_"pvalue"__autolag_"BIC"',
'attr_"usedlag"__autolag_"BIC"',
]
res = augmented_dickey_fuller(x=x, param=param)
res = pd.Series(dict(res))
self.assertCountEqual(list(res.index), expected_index)
self.assertGreater(res['attr_"pvalue"__autolag_"BIC"'], 0.10)
self.assertEqual(res['attr_"usedlag"__autolag_"BIC"'], 0)
# H0 should be rejected for AR(1) model with x_{t} = 1/2 x_{t-1} + e_{t}
np.random.seed(seed=42)
e = np.random.normal(0.1, 0.1, size=100)
m = 50
x = [0] * m
x[0] = 100
for i in range(1, m):
x[i] = x[i - 1] * 0.5 + e[i]
param = [
{"autolag": "AIC", "attr": "teststat"},
{"autolag": "AIC", "attr": "pvalue"},
{"autolag": "AIC", "attr": "usedlag"},
]
expected_index = [
'attr_"teststat"__autolag_"AIC"',
'attr_"pvalue"__autolag_"AIC"',
'attr_"usedlag"__autolag_"AIC"',
]
res = augmented_dickey_fuller(x=x, param=param)
res = pd.Series(dict(res))
self.assertCountEqual(list(res.index), expected_index)
self.assertLessEqual(res['attr_"pvalue"__autolag_"AIC"'], 0.05)
self.assertEqual(res['attr_"usedlag"__autolag_"AIC"'], 0)
# Check if LinAlgError and ValueError are catched
res_linalg_error = augmented_dickey_fuller(
x=np.repeat(np.nan, 100), param=param
)
res_value_error = augmented_dickey_fuller(x=[], param=param)
for index, val in res_linalg_error:
self.assertIsNaN(val)
for index, val in res_value_error:
self.assertIsNaN(val)
# Should return NaN if "attr" is unknown
res_attr_error = augmented_dickey_fuller(
x=x, param=[{"autolag": "AIC", "attr": ""}]
)
for index, val in res_attr_error:
self.assertIsNaN(val) | blue-yonder/tsfresh | [
7135,
1120,
7135,
61,
1477481357
] |
def test_cid_ce(self):
self.assertEqualOnAllArrayTypes(cid_ce, [1, 1, 1], 0, normalize=True)
self.assertEqualOnAllArrayTypes(cid_ce, [0, 4], 2, normalize=True)
self.assertEqualOnAllArrayTypes(cid_ce, [100, 104], 2, normalize=True)
self.assertEqualOnAllArrayTypes(cid_ce, [1, 1, 1], 0, normalize=False)
self.assertEqualOnAllArrayTypes(cid_ce, [0.5, 3.5, 7.5], 5, normalize=False)
self.assertEqualOnAllArrayTypes(
cid_ce, [-4.33, -1.33, 2.67], 5, normalize=False
) | blue-yonder/tsfresh | [
7135,
1120,
7135,
61,
1477481357
] |
def test_fourier_entropy(self):
self.assertAlmostEqualOnAllArrayTypes(
fourier_entropy, [1, 2, 1], 0.693147180, bins=2
)
self.assertAlmostEqualOnAllArrayTypes(
fourier_entropy, [1, 2, 1], 0.693147180, bins=5
)
self.assertAlmostEqualOnAllArrayTypes(
fourier_entropy, [1, 1, 2, 1, 1, 1, 1], 0.5623351446188083, bins=5
)
self.assertAlmostEqualOnAllArrayTypes(
fourier_entropy, [1, 1, 1, 1, 2, 1, 1], 1.0397207708399179, bins=5
)
self.assertAlmostEqualOnAllArrayTypes(
fourier_entropy,
[-1, 4.3, 5, 1, -4.5, 1, 5, 7, -3.4, 6],
1.5607104090414063,
bins=10,
)
self.assertIsNanOnAllArrayTypes(
fourier_entropy, [-1, np.nan, 5, 1, -4.5, 1, 5, 7, -3.4, 6], bins=10
) | blue-yonder/tsfresh | [
7135,
1120,
7135,
61,
1477481357
] |
def test_ratio_beyond_r_sigma(self):
x = [0, 1] * 10 + [10, 20, -30] # std of x is 7.21, mean 3.04
self.assertEqualOnAllArrayTypes(ratio_beyond_r_sigma, x, 3.0 / len(x), r=1)
self.assertEqualOnAllArrayTypes(ratio_beyond_r_sigma, x, 2.0 / len(x), r=2)
self.assertEqualOnAllArrayTypes(ratio_beyond_r_sigma, x, 1.0 / len(x), r=3)
self.assertEqualOnAllArrayTypes(ratio_beyond_r_sigma, x, 0, r=20) | blue-yonder/tsfresh | [
7135,
1120,
7135,
61,
1477481357
] |
def test_mean_change(self):
self.assertEqualOnAllArrayTypes(mean_change, [-2, 2, 5], 3.5)
self.assertEqualOnAllArrayTypes(mean_change, [1, 2, -1], -1)
self.assertEqualOnAllArrayTypes(mean_change, [10, 20], 10)
self.assertIsNanOnAllArrayTypes(mean_change, [1])
self.assertIsNanOnAllArrayTypes(mean_change, []) | blue-yonder/tsfresh | [
7135,
1120,
7135,
61,
1477481357
] |
def test_median(self):
self.assertEqualOnAllArrayTypes(median, [1, 1, 2, 2], 1.5)
self.assertEqualOnAllArrayTypes(median, [0.5, 0.5, 2, 3.5, 10], 2)
self.assertEqualOnAllArrayTypes(median, [0.5], 0.5)
self.assertIsNanOnAllArrayTypes(median, []) | blue-yonder/tsfresh | [
7135,
1120,
7135,
61,
1477481357
] |
def test_length(self):
self.assertEqualOnAllArrayTypes(length, [1, 2, 3, 4], 4)
self.assertEqualOnAllArrayTypes(length, [1, 2, 3], 3)
self.assertEqualOnAllArrayTypes(length, [1, 2], 2)
self.assertEqualOnAllArrayTypes(length, [1, 2, 3, np.NaN], 4)
self.assertEqualOnAllArrayTypes(length, [], 0) | blue-yonder/tsfresh | [
7135,
1120,
7135,
61,
1477481357
] |
def test_variation_coefficient(self):
self.assertIsNanOnAllArrayTypes(
variation_coefficient, [1, 1, -1, -1],
)
self.assertAlmostEqualOnAllArrayTypes(
variation_coefficient, [1, 2, -3, -1], -7.681145747868608
)
self.assertAlmostEqualOnAllArrayTypes(
variation_coefficient, [1, 2, 4, -1], 1.2018504251546631
)
self.assertIsNanOnAllArrayTypes(variation_coefficient, []) | blue-yonder/tsfresh | [
7135,
1120,
7135,
61,
1477481357
] |
def test_skewness(self):
self.assertEqualOnAllArrayTypes(skewness, [1, 1, 1, 2, 2, 2], 0)
self.assertAlmostEqualOnAllArrayTypes(
skewness, [1, 1, 1, 2, 2], 0.6085806194501855
)
self.assertEqualOnAllArrayTypes(skewness, [1, 1, 1], 0)
self.assertIsNanOnAllArrayTypes(skewness, [1, 1]) | blue-yonder/tsfresh | [
7135,
1120,
7135,
61,
1477481357
] |
def test_root_mean_square(self):
self.assertAlmostEqualOnAllArrayTypes(
root_mean_square, [1, 1, 1, 2, 2], 1.4832396974191
)
self.assertAlmostEqualOnAllArrayTypes(root_mean_square, [0], 0)
self.assertIsNanOnAllArrayTypes(root_mean_square, [])
self.assertAlmostEqualOnAllArrayTypes(root_mean_square, [1], 1)
self.assertAlmostEqualOnAllArrayTypes(root_mean_square, [-1], 1) | blue-yonder/tsfresh | [
7135,
1120,
7135,
61,
1477481357
] |
def test_absolute_sum_of_changes(self):
self.assertEqualOnAllArrayTypes(absolute_sum_of_changes, [1, 1, 1, 1, 2, 1], 2)
self.assertEqualOnAllArrayTypes(absolute_sum_of_changes, [1, -1, 1, -1], 6)
self.assertEqualOnAllArrayTypes(absolute_sum_of_changes, [1], 0)
self.assertEqualOnAllArrayTypes(absolute_sum_of_changes, [], 0) | blue-yonder/tsfresh | [
7135,
1120,
7135,
61,
1477481357
] |
def test_longest_strike_above_mean(self):
self.assertEqualOnAllArrayTypes(
longest_strike_above_mean, [1, 2, 1, 2, 1, 2, 2, 1], 2
)
self.assertEqualOnAllArrayTypes(
longest_strike_above_mean, [1, 2, 3, 4, 5, 6], 3
)
self.assertEqualOnAllArrayTypes(longest_strike_above_mean, [1, 2, 3, 4, 5], 2)
self.assertEqualOnAllArrayTypes(longest_strike_above_mean, [1, 2, 1], 1)
self.assertEqualOnAllArrayTypes(longest_strike_above_mean, [], 0) | blue-yonder/tsfresh | [
7135,
1120,
7135,
61,
1477481357
] |
def test_count_below_mean(self):
self.assertEqualOnAllArrayTypes(count_below_mean, [1, 2, 1, 2, 1, 2], 3)
self.assertEqualOnAllArrayTypes(count_below_mean, [1, 1, 1, 1, 1, 2], 5)
self.assertEqualOnAllArrayTypes(count_below_mean, [1, 1, 1, 1, 1], 0)
self.assertEqualOnAllArrayTypes(count_below_mean, [], 0) | blue-yonder/tsfresh | [
7135,
1120,
7135,
61,
1477481357
] |
def test_first_location_of_maximum(self):
self.assertAlmostEqualOnAllArrayTypes(
first_location_of_maximum, [1, 2, 1, 2, 1], 0.2
)
self.assertAlmostEqualOnAllArrayTypes(
first_location_of_maximum, [1, 2, 1, 1, 2], 0.2
)
self.assertAlmostEqualOnAllArrayTypes(
first_location_of_maximum, [2, 1, 1, 1, 1], 0.0
)
self.assertAlmostEqualOnAllArrayTypes(
first_location_of_maximum, [1, 1, 1, 1, 1], 0.0
)
self.assertAlmostEqualOnAllArrayTypes(first_location_of_maximum, [1], 0.0)
self.assertIsNanOnAllArrayTypes(first_location_of_maximum, []) | blue-yonder/tsfresh | [
7135,
1120,
7135,
61,
1477481357
] |
def test_first_location_of_minimum(self):
self.assertAlmostEqualOnAllArrayTypes(
first_location_of_minimum, [1, 2, 1, 2, 1], 0.0
)
self.assertAlmostEqualOnAllArrayTypes(
first_location_of_minimum, [2, 2, 1, 2, 2], 0.4
)
self.assertAlmostEqualOnAllArrayTypes(
first_location_of_minimum, [2, 1, 1, 1, 2], 0.2
)
self.assertAlmostEqualOnAllArrayTypes(
first_location_of_minimum, [1, 1, 1, 1, 1], 0.0
)
self.assertAlmostEqualOnAllArrayTypes(first_location_of_minimum, [1], 0.0)
self.assertIsNanOnAllArrayTypes(first_location_of_minimum, []) | blue-yonder/tsfresh | [
7135,
1120,
7135,
61,
1477481357
] |
def test_ratio_of_doubled_values(self):
self.assertAlmostEqualOnAllArrayTypes(
percentage_of_reoccurring_values_to_all_values, [1, 1, 2, 3, 4], 0.25
)
self.assertAlmostEqualOnAllArrayTypes(
percentage_of_reoccurring_values_to_all_values, [1, 1.5, 2, 3], 0
)
self.assertAlmostEqualOnAllArrayTypes(
percentage_of_reoccurring_values_to_all_values, [1], 0
)
self.assertAlmostEqualOnAllArrayTypes(
percentage_of_reoccurring_values_to_all_values,
[1.111, -2.45, 1.111, 2.45],
1.0 / 3.0,
)
self.assertIsNanOnAllArrayTypes(
percentage_of_reoccurring_values_to_all_values, []
) | blue-yonder/tsfresh | [
7135,
1120,
7135,
61,
1477481357
] |
def test_sum_of_reoccurring_data_points(self):
self.assertAlmostEqualOnAllArrayTypes(
sum_of_reoccurring_data_points, [1, 1, 2, 3, 4, 4], 10
)
self.assertAlmostEqualOnAllArrayTypes(
sum_of_reoccurring_data_points, [1, 1.5, 2, 3], 0
)
self.assertAlmostEqualOnAllArrayTypes(sum_of_reoccurring_data_points, [1], 0)
self.assertAlmostEqualOnAllArrayTypes(
sum_of_reoccurring_data_points, [1.111, -2.45, 1.111, 2.45], 2.222
)
self.assertAlmostEqualOnAllArrayTypes(sum_of_reoccurring_data_points, [], 0) | blue-yonder/tsfresh | [
7135,
1120,
7135,
61,
1477481357
] |
def test_fft_coefficient(self):
x = range(10)
param = [
{"coeff": 0, "attr": "real"},
{"coeff": 1, "attr": "real"},
{"coeff": 2, "attr": "real"},
{"coeff": 0, "attr": "imag"},
{"coeff": 1, "attr": "imag"},
{"coeff": 2, "attr": "imag"},
{"coeff": 0, "attr": "angle"},
{"coeff": 1, "attr": "angle"},
{"coeff": 2, "attr": "angle"},
{"coeff": 0, "attr": "abs"},
{"coeff": 1, "attr": "abs"},
{"coeff": 2, "attr": "abs"},
]
expected_index = [
'attr_"real"__coeff_0',
'attr_"real"__coeff_1',
'attr_"real"__coeff_2',
'attr_"imag"__coeff_0',
'attr_"imag"__coeff_1',
'attr_"imag"__coeff_2',
'attr_"angle"__coeff_0',
'attr_"angle"__coeff_1',
'attr_"angle"__coeff_2',
'attr_"abs"__coeff_0',
'attr_"abs"__coeff_1',
'attr_"abs"__coeff_2',
]
res = pd.Series(dict(fft_coefficient(x, param)))
self.assertCountEqual(list(res.index), expected_index)
self.assertAlmostEqual(res['attr_"imag"__coeff_0'], 0, places=6)
self.assertAlmostEqual(res['attr_"real"__coeff_0'], sum(x), places=6)
self.assertAlmostEqual(res['attr_"angle"__coeff_0'], 0, places=6)
self.assertAlmostEqual(res['attr_"abs"__coeff_0'], sum(x), places=6)
x = [0, 1, 0, 0]
res = pd.Series(dict(fft_coefficient(x, param)))
# see documentation of fft in numpy
# should return array([1. + 0.j, 0. - 1.j, -1. + 0.j])
self.assertAlmostEqual(res['attr_"imag"__coeff_0'], 0, places=6)
self.assertAlmostEqual(res['attr_"real"__coeff_0'], 1, places=6)
self.assertAlmostEqual(res['attr_"imag"__coeff_1'], -1, places=6)
self.assertAlmostEqual(res['attr_"angle"__coeff_1'], -90, places=6)
self.assertAlmostEqual(res['attr_"real"__coeff_1'], 0, places=6)
self.assertAlmostEqual(res['attr_"imag"__coeff_2'], 0, places=6)
self.assertAlmostEqual(res['attr_"real"__coeff_2'], -1, places=6)
# test what happens if coeff is biger than time series lenght
x = range(5)
param = [{"coeff": 10, "attr": "real"}]
expected_index = ['attr_"real"__coeff_10']
res = pd.Series(dict(fft_coefficient(x, param)))
self.assertCountEqual(list(res.index), expected_index)
self.assertIsNaN(res['attr_"real"__coeff_10']) | blue-yonder/tsfresh | [
7135,
1120,
7135,
61,
1477481357
] |
def normal(y, mean_, sigma_):
return (
1
/ (2 * np.pi * sigma_ ** 2)
* np.exp(-((y - mean_) ** 2) / (2 * sigma_ ** 2))
) | blue-yonder/tsfresh | [
7135,
1120,
7135,
61,
1477481357
] |
def test_number_peaks(self):
x = np.array([0, 1, 2, 1, 0, 1, 2, 3, 4, 5, 4, 3, 2, 1])
self.assertEqualOnAllArrayTypes(number_peaks, x, 2, 1)
self.assertEqualOnAllArrayTypes(number_peaks, x, 2, 2)
self.assertEqualOnAllArrayTypes(number_peaks, x, 1, 3)
self.assertEqualOnAllArrayTypes(number_peaks, x, 1, 4)
self.assertEqualOnAllArrayTypes(number_peaks, x, 0, 5)
self.assertEqualOnAllArrayTypes(number_peaks, x, 0, 6) | blue-yonder/tsfresh | [
7135,
1120,
7135,
61,
1477481357
] |
def test_number_cwt_peaks(self):
x = [1, 1, 1, 1, 1, 1, 1, 5, 1, 1, 1, 1, 1, 1, 5, 1, 1, 1, 1, 1, 1]
self.assertEqualOnAllArrayTypes(number_cwt_peaks, x, 2, 2) | blue-yonder/tsfresh | [
7135,
1120,
7135,
61,
1477481357
] |
def test_cwt_coefficients(self):
x = [0.1, 0.2, 0.3]
param = [
{"widths": (1, 2, 3), "coeff": 2, "w": 1},
{"widths": (1, 3), "coeff": 2, "w": 3},
{"widths": (1, 3), "coeff": 5, "w": 3},
]
shuffle(param)
expected_index = [
"coeff_2__w_1__widths_(1, 2, 3)",
"coeff_2__w_3__widths_(1, 3)",
"coeff_5__w_3__widths_(1, 3)",
]
res = cwt_coefficients(x, param)
res = pd.Series(dict(res))
# todo: add unit test for the values
self.assertCountEqual(list(res.index), expected_index)
self.assertTrue(math.isnan(res["coeff_5__w_3__widths_(1, 3)"])) | blue-yonder/tsfresh | [
7135,
1120,
7135,
61,
1477481357
] |
def test_time_reversal_asymmetry_statistic(self):
x = [1] * 10
self.assertAlmostEqualOnAllArrayTypes(
time_reversal_asymmetry_statistic, x, 0, 0
)
self.assertAlmostEqualOnAllArrayTypes(
time_reversal_asymmetry_statistic, x, 0, 1
)
self.assertAlmostEqualOnAllArrayTypes(
time_reversal_asymmetry_statistic, x, 0, 2
)
self.assertAlmostEqualOnAllArrayTypes(
time_reversal_asymmetry_statistic, x, 0, 3
)
x = [1, 2, -3, 4]
# 1/2 * ( (4^2 * -3 + 3 * 2^2) + (3^2*2)-(2*1^1)) = 1/2 * (-48+12+18-2) = 20/2
self.assertAlmostEqualOnAllArrayTypes(
time_reversal_asymmetry_statistic, x, -10, 1
)
self.assertAlmostEqualOnAllArrayTypes(
time_reversal_asymmetry_statistic, x, 0, 2
)
self.assertAlmostEqualOnAllArrayTypes(
time_reversal_asymmetry_statistic, x, 0, 3
) | blue-yonder/tsfresh | [
7135,
1120,
7135,
61,
1477481357
] |
def test_c3(self):
x = [1] * 10
self.assertAlmostEqualOnAllArrayTypes(c3, x, 1, 0)
self.assertAlmostEqualOnAllArrayTypes(c3, x, 1, 1)
self.assertAlmostEqualOnAllArrayTypes(c3, x, 1, 2)
self.assertAlmostEqualOnAllArrayTypes(c3, x, 1, 3)
x = [1, 2, -3, 4]
# 1/2 *(1*2*(-3)+2*(-3)*4) = 1/2 *(-6-24) = -30/2
self.assertAlmostEqualOnAllArrayTypes(c3, x, -15, 1)
self.assertAlmostEqualOnAllArrayTypes(c3, x, 0, 2)
self.assertAlmostEqualOnAllArrayTypes(c3, x, 0, 3) | blue-yonder/tsfresh | [
7135,
1120,
7135,
61,
1477481357
] |
def test_sample_entropy(self):
# "random" list -> large entropy
ts = [
1,
4,
5,
1,
7,
3,
1,
2,
5,
8,
9,
7,
3,
7,
9,
5,
4,
3,
9,
1,
2,
3,
4,
2,
9,
6,
7,
4,
9,
2,
9,
9,
6,
5,
1,
3,
8,
1,
5,
3,
8,
4,
1,
2,
2,
1,
6,
5,
3,
6,
5,
4,
8,
9,
6,
7,
5,
3,
2,
5,
4,
2,
5,
1,
6,
5,
3,
5,
6,
7,
8,
5,
2,
8,
6,
3,
8,
2,
7,
1,
7,
3,
5,
6,
2,
1,
3,
7,
3,
5,
3,
7,
6,
7,
7,
2,
3,
1,
7,
8,
]
self.assertAlmostEqualOnAllArrayTypes(sample_entropy, ts, 2.38262780)
# This is not very complex, so it gives a small value
ts = [1, 1, 1, 1, 1, 1, 1, 1, 1, 1]
self.assertAlmostEqualOnAllArrayTypes(sample_entropy, ts, 0.25131442)
# however adding a 2 increases complexity
ts = [1, 1, 2, 1, 1, 1, 1, 1, 1, 1]
self.assertAlmostEqualOnAllArrayTypes(sample_entropy, ts, 0.74193734)
# and it does not matter where
ts = [1, 1, 1, 2, 1, 1, 1, 1, 1, 1]
self.assertAlmostEqualOnAllArrayTypes(sample_entropy, ts, 0.74193734)
# negative numbers also work
ts = [1, -1, 1, -1, 1, -1]
self.assertAlmostEqualOnAllArrayTypes(sample_entropy, ts, 0.69314718)
# nan gives nan
ts = [1, -1, 1, np.nan, 1, -1]
self.assertIsNanOnAllArrayTypes(sample_entropy, ts)
# this is not a very "random" list, so it should give a small entropy
ts = list(range(1000))
self.assertAlmostEqualOnAllArrayTypes(sample_entropy, ts, 0.0010314596066622707) | blue-yonder/tsfresh | [
7135,
1120,
7135,
61,
1477481357
] |
def test_quantile(self):
self.assertAlmostEqualOnAllArrayTypes(
quantile, [1, 1, 1, 3, 4, 7, 9, 11, 13, 13], 1.0, 0.2
)
self.assertAlmostEqualOnAllArrayTypes(
quantile, [1, 1, 1, 3, 4, 7, 9, 11, 13, 13], 13, 0.9
)
self.assertAlmostEqualOnAllArrayTypes(
quantile, [1, 1, 1, 3, 4, 7, 9, 11, 13, 13], 13, 1.0
)
self.assertAlmostEqualOnAllArrayTypes(quantile, [1], 1, 0.5)
self.assertIsNanOnAllArrayTypes(quantile, [], 0.5) | blue-yonder/tsfresh | [
7135,
1120,
7135,
61,
1477481357
] |
def test_value_count(self):
self.assertEqualPandasSeriesWrapper(value_count, [1] * 10, 10, value=1)
self.assertEqualPandasSeriesWrapper(value_count, list(range(10)), 1, value=0)
self.assertEqualPandasSeriesWrapper(value_count, [1] * 10, 0, value=0)
self.assertEqualPandasSeriesWrapper(value_count, [np.NaN, 0, 1] * 3, 3, value=0)
self.assertEqualPandasSeriesWrapper(
value_count, [np.NINF, 0, 1] * 3, 3, value=0
)
self.assertEqualPandasSeriesWrapper(
value_count, [np.PINF, 0, 1] * 3, 3, value=0
)
self.assertEqualPandasSeriesWrapper(
value_count, [0.1, 0.2, 0.3] * 3, 3, value=0.2
)
self.assertEqualPandasSeriesWrapper(
value_count, [np.NaN, 0, 1] * 3, 3, value=np.NaN
)
self.assertEqualPandasSeriesWrapper(
value_count, [np.NINF, 0, 1] * 3, 3, value=np.NINF
)
self.assertEqualPandasSeriesWrapper(
value_count, [np.PINF, 0, 1] * 3, 3, value=np.PINF
) | blue-yonder/tsfresh | [
7135,
1120,
7135,
61,
1477481357
] |
def test_approximate_entropy(self):
self.assertEqualOnAllArrayTypes(approximate_entropy, [1], 0, m=2, r=0.5)
self.assertEqualOnAllArrayTypes(approximate_entropy, [1, 2], 0, m=2, r=0.5)
self.assertEqualOnAllArrayTypes(approximate_entropy, [1, 2, 3], 0, m=2, r=0.5)
self.assertEqualOnAllArrayTypes(approximate_entropy, [1, 2, 3], 0, m=2, r=0.5)
self.assertAlmostEqualOnAllArrayTypes(
approximate_entropy, [12, 13, 15, 16, 17] * 10, 0.282456191, m=2, r=0.9
)
self.assertRaises(
ValueError, approximate_entropy, x=[12, 13, 15, 16, 17] * 10, m=2, r=-0.5
) | blue-yonder/tsfresh | [
7135,
1120,
7135,
61,
1477481357
] |
def test_max_langevin_fixed_point(self):
"""
Estimating the intrinsic velocity of a dissipative soliton
"""
default_params = {"m": 3, "r": 30}
# active Brownian motion
ds = velocity(tau=3.8, delta_t=0.05, R=3e-4, seed=0)
v = ds.simulate(100000, v0=np.zeros(1))
v0 = max_langevin_fixed_point(v[:, 0], **default_params)
self.assertLess(abs(ds.deterministic - v0), 0.001)
# Brownian motion
ds = velocity(tau=2.0 / 0.3 - 3.8, delta_t=0.05, R=3e-4, seed=0)
v = ds.simulate(10000, v0=np.zeros(1))
v0 = max_langevin_fixed_point(v[:, 0], **default_params)
self.assertLess(v0, 0.001) | blue-yonder/tsfresh | [
7135,
1120,
7135,
61,
1477481357
] |
def test__aggregate_on_chunks(self):
self.assertListEqual(
_aggregate_on_chunks(x=pd.Series([0, 1, 2, 3]), f_agg="max", chunk_len=2),
[1, 3],
)
self.assertListEqual(
_aggregate_on_chunks(x=pd.Series([1, 1, 3, 3]), f_agg="max", chunk_len=2),
[1, 3],
)
self.assertListEqual(
_aggregate_on_chunks(x=pd.Series([0, 1, 2, 3]), f_agg="min", chunk_len=2),
[0, 2],
)
self.assertListEqual(
_aggregate_on_chunks(
x=pd.Series([0, 1, 2, 3, 5]), f_agg="min", chunk_len=2
),
[0, 2, 5],
)
self.assertListEqual(
_aggregate_on_chunks(x=pd.Series([0, 1, 2, 3]), f_agg="mean", chunk_len=2),
[0.5, 2.5],
)
self.assertListEqual(
_aggregate_on_chunks(
x=pd.Series([0, 1, 0, 4, 5]), f_agg="mean", chunk_len=2
),
[0.5, 2, 5],
)
self.assertListEqual(
_aggregate_on_chunks(
x=pd.Series([0, 1, 0, 4, 5]), f_agg="mean", chunk_len=3
),
[1 / 3, 4.5],
)
self.assertListEqual(
_aggregate_on_chunks(
x=pd.Series([0, 1, 2, 3, 5, -2]), f_agg="median", chunk_len=2
),
[0.5, 2.5, 1.5],
)
self.assertListEqual(
_aggregate_on_chunks(
x=pd.Series([-10, 5, 3, -3, 4, -6]), f_agg="median", chunk_len=3
),
[3, -3],
)
self.assertListEqual(
_aggregate_on_chunks(
x=pd.Series([0, 1, 2, np.NaN, 5]), f_agg="median", chunk_len=2
),
[0.5, 2, 5],
) | blue-yonder/tsfresh | [
7135,
1120,
7135,
61,
1477481357
] |
def test_energy_ratio_by_chunks(self):
x = pd.Series(range(90), index=range(90))
param = [{"num_segments": 6, "segment_focus": i} for i in range(6)]
output = energy_ratio_by_chunks(x=x, param=param)
self.assertAlmostEqual(output[0][1], 0.0043, places=3)
self.assertAlmostEqual(output[1][1], 0.0316, places=3)
self.assertAlmostEqual(output[2][1], 0.0871, places=3)
self.assertAlmostEqual(output[3][1], 0.1709, places=3)
self.assertAlmostEqual(output[4][1], 0.2829, places=3)
self.assertAlmostEqual(output[5][1], 0.4232, places=3)
# Sum of the ratios should be 1.0
sum = 0.0
for name, dat in output:
sum = sum + dat
self.assertAlmostEqual(sum, 1.0)
x = pd.Series(1, index=range(10))
param = [{"num_segments": 3, "segment_focus": i} for i in range(3)]
output = energy_ratio_by_chunks(x=x, param=param)
self.assertAlmostEqual(output[0][1], 0.4, places=3)
self.assertAlmostEqual(output[1][1], 0.3, places=3)
self.assertAlmostEqual(output[2][1], 0.3, places=3)
# Sum of the ratios should be 1.0
sum = 0.0
for name, dat in output:
sum = sum + dat
self.assertAlmostEqual(sum, 1.0)
x = pd.Series(0, index=range(10))
param = [{"num_segments": 3, "segment_focus": i} for i in range(3)]
output = energy_ratio_by_chunks(x=x, param=param)
self.assertIsNaN(output[0][1])
self.assertIsNaN(output[1][1])
self.assertIsNaN(output[2][1]) | blue-yonder/tsfresh | [
7135,
1120,
7135,
61,
1477481357
] |
def test_linear_trend_timewise_days(self):
"""Test linear_trend_timewise function with day intervals."""
# Try with different days
x = pd.Series(
[0, 24, 48, 72],
index=pd.DatetimeIndex(
[
"2018-01-01 04:00:00",
"2018-01-02 04:00:00",
"2018-01-03 04:00:00",
"2018-01-04 04:00:00",
]
),
)
param = [
{"attr": "pvalue"},
{"attr": "rvalue"},
{"attr": "intercept"},
{"attr": "slope"},
{"attr": "stderr"},
]
res = linear_trend_timewise(x, param)
res = pd.Series(dict(res))
self.assertAlmostEqual(res['attr_"pvalue"'], 0, places=3)
self.assertAlmostEqual(res['attr_"stderr"'], 0, places=3)
self.assertAlmostEqual(res['attr_"intercept"'], 0, places=3)
self.assertAlmostEqual(res['attr_"slope"'], 1.0, places=3) | blue-yonder/tsfresh | [
7135,
1120,
7135,
61,
1477481357
] |
def test_linear_trend_timewise_years(self):
"""Test linear_trend_timewise function with year intervals."""
# Try with different days
x = pd.Series(
[
0,
365 * 24,
365 * 48,
365 * 72 + 24,
], # Add 24 to the last one since it's a leap year
index=pd.DatetimeIndex(
[
"2018-01-01 04:00:00",
"2019-01-01 04:00:00",
"2020-01-01 04:00:00",
"2021-01-01 04:00:00",
]
),
)
param = [
{"attr": "pvalue"},
{"attr": "rvalue"},
{"attr": "intercept"},
{"attr": "slope"},
{"attr": "stderr"},
]
res = linear_trend_timewise(x, param)
res = pd.Series(dict(res))
self.assertAlmostEqual(res['attr_"pvalue"'], 0, places=3)
self.assertAlmostEqual(res['attr_"stderr"'], 0, places=3)
self.assertAlmostEqual(res['attr_"intercept"'], 0, places=3)
self.assertAlmostEqual(res['attr_"slope"'], 1.0, places=3) | blue-yonder/tsfresh | [
7135,
1120,
7135,
61,
1477481357
] |
def test_count_above(self):
self.assertEqualPandasSeriesWrapper(count_above, [1] * 10, 1, t=1)
self.assertEqualPandasSeriesWrapper(count_above, list(range(10)), 1, t=0)
self.assertEqualPandasSeriesWrapper(count_above, list(range(10)), 0.5, t=5)
self.assertEqualPandasSeriesWrapper(
count_above, [0.1, 0.2, 0.3] * 3, 2 / 3, t=0.2
)
self.assertEqualPandasSeriesWrapper(count_above, [np.NaN, 0, 1] * 3, 2 / 3, t=0)
self.assertEqualPandasSeriesWrapper(
count_above, [np.NINF, 0, 1] * 3, 2 / 3, t=0
)
self.assertEqualPandasSeriesWrapper(count_above, [np.PINF, 0, 1] * 3, 1, t=0)
self.assertEqualPandasSeriesWrapper(
count_above, [np.NaN, 0, 1] * 3, 0, t=np.NaN
)
self.assertEqualPandasSeriesWrapper(
count_above, [np.NINF, 0, np.PINF] * 3, 1, t=np.NINF
)
self.assertEqualPandasSeriesWrapper(
count_above, [np.PINF, 0, 1] * 3, 1 / 3, t=np.PINF
) | blue-yonder/tsfresh | [
7135,
1120,
7135,
61,
1477481357
] |
def test_benford_correlation(self):
# A test with list of random values
np.random.seed(42)
random_list = np.random.uniform(size=100)
# Fibonacci series is known to match the Newcomb-Benford's Distribution
fibonacci_list = [0, 1]
for i in range(2, 200):
fibonacci_list.append(fibonacci_list[i - 1] + fibonacci_list[i - 2])
# A list of equally distributed digits (returns NaN)
equal_list = [1, 2, 3, 4, 5, 6, 7, 8, 9]
# A list containing NaN
list_with_nan = [
1.354,
0.058,
0.055,
0.99,
3.15,
np.nan,
0.3,
2.3,
0,
0.59,
0.74,
]
self.assertAlmostEqual(benford_correlation(random_list), 0.39458056)
self.assertAlmostEqual(benford_correlation(fibonacci_list), 0.998003988)
self.assertAlmostEqual(benford_correlation(list_with_nan), 0.10357511)
self.assertIsNaN(benford_correlation(equal_list)) | blue-yonder/tsfresh | [
7135,
1120,
7135,
61,
1477481357
] |
def test_matrix_profile_window(self):
# Test matrix profile output with specified window
np.random.seed(9999)
ts = np.random.uniform(size=2 ** 10)
w = 2 ** 5
subq = ts[0:w]
ts[0:w] = subq
ts[w + 100 : w + 100 + w] = subq
param = [
{"threshold": 0.98, "windows": 36, "feature": "min"},
{"threshold": 0.98, "windows": 36, "feature": "max"},
{"threshold": 0.98, "windows": 36, "feature": "mean"},
{"threshold": 0.98, "windows": 36, "feature": "median"},
{"threshold": 0.98, "windows": 36, "feature": "25"},
{"threshold": 0.98, "windows": 36, "feature": "75"},
]
self.assertAlmostEqual(matrix_profile(ts, param=param)[0][1], 2.825786727580335) | blue-yonder/tsfresh | [
7135,
1120,
7135,
61,
1477481357
] |
def test_matrix_profile_nan(self):
# Test matrix profile of NaNs (NaN output)
ts = np.random.uniform(size=2 ** 6)
ts[:] = np.nan
param = [
{"threshold": 0.98, "windows": None, "feature": "min"},
{"threshold": 0.98, "windows": None, "feature": "max"},
{"threshold": 0.98, "windows": None, "feature": "mean"},
{"threshold": 0.98, "windows": None, "feature": "median"},
{"threshold": 0.98, "windows": None, "feature": "25"},
{"threshold": 0.98, "windows": None, "feature": "75"},
]
self.assertTrue(np.isnan(matrix_profile(ts, param=param)[0][1])) | blue-yonder/tsfresh | [
7135,
1120,
7135,
61,
1477481357
] |
def test_estimate_friedrich_coefficients(self):
"""
Estimate friedrich coefficients
"""
default_params = {"m": 3, "r": 30}
# active Brownian motion
ds = velocity(tau=3.8, delta_t=0.05, R=3e-4, seed=0)
v = ds.simulate(10000, v0=np.zeros(1))
coeff = _estimate_friedrich_coefficients(v[:, 0], **default_params)
self.assertLess(abs(coeff[-1]), 0.0001)
# Brownian motion
ds = velocity(tau=2.0 / 0.3 - 3.8, delta_t=0.05, R=3e-4, seed=0)
v = ds.simulate(10000, v0=np.zeros(1))
coeff = _estimate_friedrich_coefficients(v[:, 0], **default_params)
self.assertLess(abs(coeff[-1]), 0.0001) | blue-yonder/tsfresh | [
7135,
1120,
7135,
61,
1477481357
] |
def test_friedrich_number_of_returned_features_is_equal_to_number_of_parameters(
self, | blue-yonder/tsfresh | [
7135,
1120,
7135,
61,
1477481357
] |
def run_L_D_simulation(self, L, D):
# L = duplication length
# D = number of DCJs in each branch.
#
param = self.sim_parameters
# pre_dups (at root) and post_dups (at branches) to achieve 1.5 genes/family in average.
pre_duplications = int(0.43 * param.num_genes / L)
post_duplications = int(0.07 * param.num_genes / L)
post_duplications = [int(0.5 * post_duplications), int(1.5 * post_duplications)]
# post_duplications = [int(1 * post_duplications), int(1 * post_duplications)]
param.pre_duplications = pre_duplications
current_copy_number = None # will init at root
deletion_length_range = xrange(1, param.indel_length + 1)
duplication_length_range = xrange(1, L + 1)
idx = 1
ev_tree = self.sim_tree
for ev_node in ev_tree.preorder_node_iter():
if ev_node.parent_node is None:
# identity genome:
ev_node.value = current_genome = model.Genome.identity(param.num_genes, param.num_chr)
ev_node.events = {ev: 0 for ev in EventType.all}
# add copy number information to track orthologous/paralogous, when duplications are present:
for chromosome in current_genome.chromosomes:
chromosome.copy_number = [1] * len(chromosome.gene_order)
current_copy_number = current_genome.gene_count()
# pre-duplications:
for i in range(pre_duplications):
Simulation.apply_random_segmental_duplication(current_genome,
range(1, param.duplication_length + 1),
current_copy_number)
ev_node.events[EventType.DUPLICATION] = pre_duplications
# ev_node.edge.length = pre_duplications
if ev_node.label is None:
ev_node.label = "Root"
else:
# evolve genome:
if ev_node.is_internal():
if ev_node.label is None:
ev_node.label = "M%02d" % idx
idx += 1
else: # complete labelling for leaves
ev_node.label = ev_node.taxon.label
current_genome = ev_node.parent_node.value.clone(ev_node.label)
ev_node.value = current_genome
pd = post_duplications.pop()
ev_node.edge.length = D + pd
# events
events = [EventType.DUPLICATION] * pd + [EventType.REARRANGEMENT] * D
ev_node.edge.events = {ev: 0 for ev in EventType.all}
random.shuffle(events)
for event in events:
if event == EventType.DUPLICATION:
Simulation.apply_random_segmental_duplication(current_genome, duplication_length_range, current_copy_number)
ev_node.edge.events[event] += 1
elif event == EventType.REARRANGEMENT:
# here, I can also have deletions:
ev = np.random.choice([RearrangementType.REVERSAL, EventType.DELETION], 1,
p=[param.rearrangement_p, param.deletion_p])[0]
if ev == RearrangementType.REVERSAL:
Simulation.apply_random_reversal(current_genome)
ev_node.edge.events[event] += 1
else:
Simulation.apply_random_deletion(current_genome, deletion_length_range)
ev_node.edge.events[EventType.DELETION] += 1
ev_node.events = {ev: ev_node.parent_node.events[ev] + count for ev, count in
ev_node.edge.events.iteritems()} | pedrofeijao/RINGO | [
4,
1,
4,
2,
1467712417
] |
def __init__(self, handler_):
assert callable(handler_)
super().__init__()
self._handler = handler_
self._event = threading.Event()
self._thread = None | uniflex/uniflex | [
3,
2,
3,
1,
1478094848
] |
def cancel(self):
if (not self._thread) or (not self._thread.is_alive()):
return
self._event.set()
# self._thread.join()
self._thread = None | uniflex/uniflex | [
3,
2,
3,
1,
1478094848
] |
def _timer(self, interval):
# Avoid cancellation during execution of self._callable()
cancel = self._event.wait(interval)
if cancel:
return
self._handler() | uniflex/uniflex | [
3,
2,
3,
1,
1478094848
] |
def __init__(self, app, ev_cls):
super(TimerEventSender, self).__init__(self._timeout)
self._app = app
self._ev_cls = ev_cls | uniflex/uniflex | [
3,
2,
3,
1,
1478094848
] |
def __init__(self, credentials):
output.startup_message(credentials)
self.credentials = credentials
self.reddit = self.connect()
self.NUM_POSTS = 20 | tylerbrockett/reddit-bot-buildapcsales | [
117,
10,
117,
4,
1447791069
] |
def disconnect(self):
self.reddit = None | tylerbrockett/reddit-bot-buildapcsales | [
117,
10,
117,
4,
1447791069
] |
def get_instance(self):
return self.reddit | tylerbrockett/reddit-bot-buildapcsales | [
117,
10,
117,
4,
1447791069
] |
def get_message(self, message_id):
return self.reddit.inbox.message(message_id) | tylerbrockett/reddit-bot-buildapcsales | [
117,
10,
117,
4,
1447791069
] |
def get_submissions(self, subreddit):
    """Return the newest submissions of *subreddit*.

    Uses a larger page size (200) for r/all, otherwise self.NUM_POSTS.
    Returns [] on a Forbidden response; any other failure is logged and
    re-raised as RedditHelperException.
    """
    limit = 200 if (subreddit == 'all') else self.NUM_POSTS
    try:
        return list(self.reddit.subreddit(subreddit).new(limit=limit))
    except Forbidden:
        # Private/banned subreddit: treat as "no submissions".
        Logger.log(traceback.format_exc(), Color.RED)
        return []
    except Exception:
        Logger.log(traceback.format_exc(), Color.RED)
        raise RedditHelperException(RedditHelperException.GET_SUBMISSIONS_EXCEPTION)
117,
10,
117,
4,
1447791069
] |
def check_invalid_subreddits(self, subreddits):
    """Return the subset of *subreddits* that redirect (i.e. do not exist)."""
    invalid = []
    for name in subreddits:
        try:
            # Fetching a single post forces PRAW to resolve the subreddit;
            # a nonexistent one raises Redirect.
            for _ in self.reddit.subreddit(name).new(limit=1):
                print('subreddit is valid')
        except Redirect:  # was praw.errors.InvalidSubreddit without 'len()' around call in the try block
            Logger.log(traceback.format_exc(), Color.RED)
            invalid.append(name)
    return invalid
117,
10,
117,
4,
1447791069
] |
def test_pack():
assert pwny.pack('I', 0x41424344) == b'DCBA' | edibledinos/pwnypack | [
120,
28,
120,
1,
1426724447
] |
def test_pack_explicit_endian():
assert pwny.pack('I', 0x41424344, endian=pwny.Target.Endian.big) == b'ABCD' | edibledinos/pwnypack | [
120,
28,
120,
1,
1426724447
] |
def test_pack_invalid_endian():
pwny.pack('I', 1, endian='invalid') | edibledinos/pwnypack | [
120,
28,
120,
1,
1426724447
] |
def test_unpack_format_with_endian():
assert pwny.unpack('>I', b'ABCD') == (0x41424344,) | edibledinos/pwnypack | [
120,
28,
120,
1,
1426724447
] |
def test_unpack_explicit_target():
assert pwny.unpack('I', b'ABCD', target=target_big_endian) == (0x41424344,) | edibledinos/pwnypack | [
120,
28,
120,
1,
1426724447
] |
def test_unpack_invalid_endian():
pwny.unpack('I', 'AAAA', endian='invalid') | edibledinos/pwnypack | [
120,
28,
120,
1,
1426724447
] |
def test_short_form_pack():
    # Generator-style (nose) test: yields one check per width/value pair for
    # both the signed ('p') and unsigned ('P') short-form packers.
    for prefix, dataset in (('p', short_signed_data), ('P', short_unsigned_data)):
        for width, num, bytestr in dataset:
            f = '%s%d' % (prefix, width)
            # Default and little-endian both expect the reversed byte string.
            yield check_short_form_pack, f, num, bytestr[::-1]
            yield check_short_form_pack_endian, f, num, bytestr[::-1], pwny.Target.Endian.little
            yield check_short_form_pack_endian, f, num, bytestr, pwny.Target.Endian.big
120,
28,
120,
1,
1426724447
] |
def test_pointer_pack():
    # Generator-style (nose) test for the pointer-sized packers: the same
    # 32-bit value as signed ('p') and unsigned ('P').
    for f, num, little in (('p', -66052, b'\xfc\xfd\xfe\xff'),
                           ('P', 4294901244, b'\xfc\xfd\xfe\xff')):
        yield check_short_form_pack, f, num, little
        yield check_short_form_pack_endian, f, num, little, pwny.Target.Endian.little
        # Big-endian expects the byte-reversed representation.
        yield check_short_form_pack_endian, f, num, little[::-1], pwny.Target.Endian.big
120,
28,
120,
1,
1426724447
] |
def check_short_form_pack(f, num, bytestr):
    """Assert that the short-form packer named *f* packs *num* to *bytestr*."""
    pack_fn = getattr(pwny, f)
    assert pack_fn(num) == bytestr
120,
28,
120,
1,
1426724447
] |
def check_short_form_unpack(f, num, bytestr):
    """Assert that the short-form unpacker named *f* decodes *bytestr* to *num*."""
    unpack_fn = getattr(pwny, f)
    assert unpack_fn(bytestr) == num
120,
28,
120,
1,
1426724447
] |
def test_make_request_timeout():
"""
Remote calls should time out
"""
httpretty.register_uri(httpretty.GET, "www.example.com",
body=None,
)
# When I make an API request and receive no response
c = BaseClient()
# Then I should raise a NewRelicApiException
c._make_request.when.called_with(requests.get,
"http://www.example.com",
timeout=0.05,
retries=1)\
.should.throw(requests.RequestException) | andrewgross/pyrelic | [
21,
12,
21,
2,
1333416631
] |
def test_make_request_non_200():
"""
Bad HTTP Responses should throw an error
"""
httpretty.register_uri(httpretty.GET, "http://foobar.com",
body="123", status=400)
# When I make an API request and receive a 400
c = BaseClient()
# Then I should raise the appropriate requests exception
c._make_request.when.called_with(requests.get,
"http://foobar.com")\
.should.throw(requests.RequestException) | andrewgross/pyrelic | [
21,
12,
21,
2,
1333416631
] |
def __init__(self, order, dot=True, **kwargs):
    """Fixed permutation brick.

    order : the permutation of input columns to apply.
    dot   : when True, apply via matrix multiplication; otherwise via take.
    """
    self._dot = dot
    self.order = order
    super(FixedPermutation, self).__init__(**kwargs)
27,
40,
27,
9,
1430419450
] |
def input_dim(self):
    """Expected input dimensionality: the length of the permutation."""
    order = self.order
    return len(order)
27,
40,
27,
9,
1430419450
] |
def apply(self, input_):
    """Apply the fixed permutation to *input_* along axis 1."""
    if not self._dot:
        # Cheap path: gather columns directly in permuted order.
        return tensor.take(input_, self._permutation, axis=1)
    # Matrix path: multiply by the precomputed permutation matrix.
    return tensor.dot(input_, self._matrix)
27,
40,
27,
9,
1430419450
] |
def __init__(self, client, config, serializer, deserializer) -> None:
    """Wire up the pipeline client, configuration and (de)serializers."""
    self._config = config
    self._client = client
    self._deserialize = deserializer
    self._serialize = serializer
3526,
2256,
3526,
986,
1335285972
] |
def list_locations(
self,
subscription_id: str,
**kwargs: Any | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def prepare_request(next_link=None):
if not next_link: | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def list(
self,
**kwargs: Any | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def prepare_request(next_link=None):
if not next_link: | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def build_list_by_subscription_request(
subscription_id: str,
*,
filter: Optional[str] = None,
**kwargs: Any | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def build_list_by_resource_group_request(
subscription_id: str,
resource_group_name: str,
**kwargs: Any | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def build_get_request(
subscription_id: str,
resource_group_name: str,
lab_name: str,
**kwargs: Any | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def build_create_or_update_request_initial(
subscription_id: str,
resource_group_name: str,
lab_name: str,
*,
json: JSONType = None,
content: Any = None,
**kwargs: Any | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def build_update_request_initial(
subscription_id: str,
resource_group_name: str,
lab_name: str,
*,
json: JSONType = None,
content: Any = None,
**kwargs: Any | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def build_delete_request_initial(
subscription_id: str,
resource_group_name: str,
lab_name: str,
**kwargs: Any | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def build_publish_request_initial(
subscription_id: str,
resource_group_name: str,
lab_name: str,
**kwargs: Any | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def build_sync_group_request_initial(
subscription_id: str,
resource_group_name: str,
lab_name: str,
**kwargs: Any | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def __init__(self, client, config, serializer, deserializer):
    """Wire up the pipeline client, configuration and (de)serializers."""
    self._config = config
    self._client = client
    self._deserialize = deserializer
    self._serialize = serializer
3526,
2256,
3526,
986,
1335285972
] |
def list_by_subscription(
self,
filter: Optional[str] = None,
**kwargs: Any | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def prepare_request(next_link=None):
if not next_link: | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def extract_data(pipeline_response):
    # Deserialize one page of results; `self` and `cls` are captured from
    # the enclosing paging method.
    page = self._deserialize("PagedLabs", pipeline_response)
    elems = page.value
    if cls:
        elems = cls(elems)
    return page.next_link or None, iter(elems)
3526,
2256,
3526,
986,
1335285972
] |
def list_by_resource_group(
self,
resource_group_name: str,
**kwargs: Any | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def prepare_request(next_link=None):
if not next_link: | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def extract_data(pipeline_response):
    # Deserialize one page of results; `self` and `cls` are captured from
    # the enclosing paging method.
    page = self._deserialize("PagedLabs", pipeline_response)
    elems = page.value
    if cls:
        elems = cls(elems)
    return page.next_link or None, iter(elems)
3526,
2256,
3526,
986,
1335285972
] |
def get(
self,
resource_group_name: str,
lab_name: str,
**kwargs: Any | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def _create_or_update_initial(
self,
resource_group_name: str,
lab_name: str,
body: "_models.Lab",
**kwargs: Any | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def begin_create_or_update(
self,
resource_group_name: str,
lab_name: str,
body: "_models.Lab",
**kwargs: Any | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def get_long_running_output(pipeline_response):
    # Deserialize the final LRO response into a Lab model, or hand it to a
    # caller-supplied `cls` callback (captured from the enclosing method).
    # NOTE: the generated original also bound
    # `response = pipeline_response.http_response` without ever using it;
    # that dead local has been removed.
    deserialized = self._deserialize('Lab', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
3526,
2256,
3526,
986,
1335285972
] |
def _update_initial(
self,
resource_group_name: str,
lab_name: str,
body: "_models.LabUpdate",
**kwargs: Any | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def begin_update(
self,
resource_group_name: str,
lab_name: str,
body: "_models.LabUpdate",
**kwargs: Any | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def get_long_running_output(pipeline_response):
    # Deserialize the final LRO response into a Lab model, or hand it to a
    # caller-supplied `cls` callback (captured from the enclosing method).
    # NOTE: the generated original also bound
    # `response = pipeline_response.http_response` without ever using it;
    # that dead local has been removed.
    deserialized = self._deserialize('Lab', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
3526,
2256,
3526,
986,
1335285972
] |
def _delete_initial(
self,
resource_group_name: str,
lab_name: str,
**kwargs: Any | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def begin_delete(
self,
resource_group_name: str,
lab_name: str,
**kwargs: Any | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def get_long_running_output(pipeline_response):
    # This operation has no response body: forward only the raw response to
    # any caller-supplied `cls` callback, otherwise return None.
    if cls:
        return cls(pipeline_response, None, {})
    return None
3526,
2256,
3526,
986,
1335285972
] |
def _publish_initial(
self,
resource_group_name: str,
lab_name: str,
**kwargs: Any | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def begin_publish(
self,
resource_group_name: str,
lab_name: str,
**kwargs: Any | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def get_long_running_output(pipeline_response):
    # This operation has no response body: forward only the raw response to
    # any caller-supplied `cls` callback, otherwise return None.
    if cls:
        return cls(pipeline_response, None, {})
    return None
3526,
2256,
3526,
986,
1335285972
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.